1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et. al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
64 #include "langhooks.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding is: bit 0 = "less than", bit 1 = "equal",
   bit 2 = "greater than", bit 3 = "unordered", so that the code for
   a compound predicate is the bitwise OR of its constituents.

   NOTE(review): the enumerator list was dropped by the transcription;
   restored from the canonical GCC fold-const.c -- verify against
   upstream.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
93 static void encode (HOST_WIDE_INT
*, unsigned HOST_WIDE_INT
, HOST_WIDE_INT
);
94 static void decode (HOST_WIDE_INT
*, unsigned HOST_WIDE_INT
*, HOST_WIDE_INT
*);
95 static bool negate_mathfn_p (enum built_in_function
);
96 static bool negate_expr_p (tree
);
97 static tree
negate_expr (tree
);
98 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
99 static tree
associate_trees (tree
, tree
, enum tree_code
, tree
);
100 static tree
const_binop (enum tree_code
, tree
, tree
, int);
101 static enum comparison_code
comparison_to_compcode (enum tree_code
);
102 static enum tree_code
compcode_to_comparison (enum comparison_code
);
103 static tree
combine_comparisons (enum tree_code
, enum tree_code
,
104 enum tree_code
, tree
, tree
, tree
);
105 static int truth_value_p (enum tree_code
);
106 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
107 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
108 static tree
eval_subst (tree
, tree
, tree
, tree
, tree
);
109 static tree
pedantic_omit_one_operand (tree
, tree
, tree
);
110 static tree
distribute_bit_expr (enum tree_code
, tree
, tree
, tree
);
111 static tree
make_bit_field_ref (tree
, tree
, int, int, int);
112 static tree
optimize_bit_field_compare (enum tree_code
, tree
, tree
, tree
);
113 static tree
decode_field_reference (tree
, HOST_WIDE_INT
*, HOST_WIDE_INT
*,
114 enum machine_mode
*, int *, int *,
116 static int all_ones_mask_p (tree
, int);
117 static tree
sign_bit_p (tree
, tree
);
118 static int simple_operand_p (tree
);
119 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
120 static tree
range_predecessor (tree
);
121 static tree
range_successor (tree
);
122 static tree
make_range (tree
, int *, tree
*, tree
*);
123 static tree
build_range_check (tree
, tree
, int, tree
, tree
);
124 static int merge_ranges (int *, tree
*, tree
*, int, tree
, tree
, int, tree
,
126 static tree
fold_range_test (enum tree_code
, tree
, tree
, tree
);
127 static tree
fold_cond_expr_with_comparison (tree
, tree
, tree
, tree
);
128 static tree
unextend (tree
, int, int, tree
);
129 static tree
fold_truthop (enum tree_code
, tree
, tree
, tree
);
130 static tree
optimize_minmax_comparison (enum tree_code
, tree
, tree
, tree
);
131 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
);
132 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
);
133 static int multiple_of_p (tree
, tree
, tree
);
134 static tree
fold_binary_op_with_conditional_arg (enum tree_code
, tree
,
137 static bool fold_real_zero_addition_p (tree
, tree
, int);
138 static tree
fold_mathfn_compare (enum built_in_function
, enum tree_code
,
140 static tree
fold_inf_compare (enum tree_code
, tree
, tree
, tree
);
141 static tree
fold_div_compare (enum tree_code
, tree
, tree
, tree
);
142 static bool reorder_operands_p (tree
, tree
);
143 static tree
fold_negate_const (tree
, tree
);
144 static tree
fold_not_const (tree
, tree
);
145 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
146 static int native_encode_expr (tree
, unsigned char *, int);
147 static tree
native_interpret_expr (tree
, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* NOTE(review): the "#define LOWPART(x)" line itself was dropped by the
   transcription (only its expansion survived); restored to match the
   HIGHPART definition below.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
176 encode (HOST_WIDE_INT
*words
, unsigned HOST_WIDE_INT low
, HOST_WIDE_INT hi
)
178 words
[0] = LOWPART (low
);
179 words
[1] = HIGHPART (low
);
180 words
[2] = LOWPART (hi
);
181 words
[3] = HIGHPART (hi
);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
189 decode (HOST_WIDE_INT
*words
, unsigned HOST_WIDE_INT
*low
,
192 *low
= words
[0] + words
[1] * BASE
;
193 *hi
= words
[2] + words
[3] * BASE
;
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
202 fit_double_type (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
203 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
, tree type
)
205 unsigned HOST_WIDE_INT low0
= l1
;
206 HOST_WIDE_INT high0
= h1
;
208 int sign_extended_type
;
210 if (POINTER_TYPE_P (type
)
211 || TREE_CODE (type
) == OFFSET_TYPE
)
214 prec
= TYPE_PRECISION (type
);
216 /* Size types *are* sign extended. */
217 sign_extended_type
= (!TYPE_UNSIGNED (type
)
218 || (TREE_CODE (type
) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type
)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
224 else if (prec
> HOST_BITS_PER_WIDE_INT
)
225 h1
&= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
229 if (prec
< HOST_BITS_PER_WIDE_INT
)
230 l1
&= ~((HOST_WIDE_INT
) (-1) << prec
);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type
)
235 /* No sign extension */;
236 else if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
237 /* Correct width already. */;
238 else if (prec
> HOST_BITS_PER_WIDE_INT
)
240 /* Sign extend top half? */
241 if (h1
& ((unsigned HOST_WIDE_INT
)1
242 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)))
243 h1
|= (HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
);
245 else if (prec
== HOST_BITS_PER_WIDE_INT
)
247 if ((HOST_WIDE_INT
)l1
< 0)
252 /* Sign extend bottom half? */
253 if (l1
& ((unsigned HOST_WIDE_INT
)1 << (prec
- 1)))
256 l1
|= (HOST_WIDE_INT
)(-1) << prec
;
263 /* If the value didn't fit, signal overflow. */
264 return l1
!= low0
|| h1
!= high0
;
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOWED if,
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs
278 or OVERFLOWABLE is <0 and any overflow occurs
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
283 force_fit_type_double (tree type
, unsigned HOST_WIDE_INT low
,
284 HOST_WIDE_INT high
, int overflowable
,
287 int sign_extended_type
;
290 /* Size types *are* sign extended. */
291 sign_extended_type
= (!TYPE_UNSIGNED (type
)
292 || (TREE_CODE (type
) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type
)));
295 overflow
= fit_double_type (low
, high
, &low
, &high
, type
);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed
|| overflow
)
302 || (overflowable
> 0 && sign_extended_type
))
304 tree t
= make_node (INTEGER_CST
);
305 TREE_INT_CST_LOW (t
) = low
;
306 TREE_INT_CST_HIGH (t
) = high
;
307 TREE_TYPE (t
) = type
;
308 TREE_OVERFLOW (t
) = 1;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type
, low
, high
);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
325 unsigned HOST_WIDE_INT l2
, HOST_WIDE_INT h2
,
326 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
329 unsigned HOST_WIDE_INT l
;
333 h
= h1
+ h2
+ (l
< l1
);
339 return (unsigned HOST_WIDE_INT
) h
< (unsigned HOST_WIDE_INT
) h1
;
341 return OVERFLOW_SUM_SIGN (h1
, h2
, h
);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
351 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
357 return (*hv
& h1
) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
375 unsigned HOST_WIDE_INT l2
, HOST_WIDE_INT h2
,
376 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
379 HOST_WIDE_INT arg1
[4];
380 HOST_WIDE_INT arg2
[4];
381 HOST_WIDE_INT prod
[4 * 2];
382 unsigned HOST_WIDE_INT carry
;
384 unsigned HOST_WIDE_INT toplow
, neglow
;
385 HOST_WIDE_INT tophigh
, neghigh
;
387 encode (arg1
, l1
, h1
);
388 encode (arg2
, l2
, h2
);
390 memset (prod
, 0, sizeof prod
);
392 for (i
= 0; i
< 4; i
++)
395 for (j
= 0; j
< 4; j
++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry
+= arg1
[i
] * arg2
[j
];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod
[k
] = LOWPART (carry
);
403 carry
= HIGHPART (carry
);
408 decode (prod
, lv
, hv
);
409 decode (prod
+ 4, &toplow
, &tophigh
);
411 /* Unsigned overflow is immediate. */
413 return (toplow
| tophigh
) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2
, h2
, &neglow
, &neghigh
);
420 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
424 neg_double (l1
, h1
, &neglow
, &neghigh
);
425 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
427 return (*hv
< 0 ? ~(toplow
& tophigh
) : toplow
| tophigh
) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437 lshift_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
438 HOST_WIDE_INT count
, unsigned int prec
,
439 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
, int arith
)
441 unsigned HOST_WIDE_INT signmask
;
445 rshift_double (l1
, h1
, -count
, prec
, lv
, hv
, arith
);
449 if (SHIFT_COUNT_TRUNCATED
)
452 if (count
>= 2 * HOST_BITS_PER_WIDE_INT
)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count
>= HOST_BITS_PER_WIDE_INT
)
461 *hv
= l1
<< (count
- HOST_BITS_PER_WIDE_INT
);
466 *hv
= (((unsigned HOST_WIDE_INT
) h1
<< count
)
467 | (l1
>> (HOST_BITS_PER_WIDE_INT
- count
- 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask
= -((prec
> HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT
) *hv
475 >> (prec
- HOST_BITS_PER_WIDE_INT
- 1))
476 : (*lv
>> (prec
- 1))) & 1);
478 if (prec
>= 2 * HOST_BITS_PER_WIDE_INT
)
480 else if (prec
>= HOST_BITS_PER_WIDE_INT
)
482 *hv
&= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
483 *hv
|= signmask
<< (prec
- HOST_BITS_PER_WIDE_INT
);
488 *lv
&= ~((unsigned HOST_WIDE_INT
) (-1) << prec
);
489 *lv
|= signmask
<< prec
;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 rshift_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
500 HOST_WIDE_INT count
, unsigned int prec
,
501 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
,
504 unsigned HOST_WIDE_INT signmask
;
507 ? -((unsigned HOST_WIDE_INT
) h1
>> (HOST_BITS_PER_WIDE_INT
- 1))
510 if (SHIFT_COUNT_TRUNCATED
)
513 if (count
>= 2 * HOST_BITS_PER_WIDE_INT
)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count
>= HOST_BITS_PER_WIDE_INT
)
523 *lv
= (unsigned HOST_WIDE_INT
) h1
>> (count
- HOST_BITS_PER_WIDE_INT
);
527 *hv
= (unsigned HOST_WIDE_INT
) h1
>> count
;
529 | ((unsigned HOST_WIDE_INT
) h1
<< (HOST_BITS_PER_WIDE_INT
- count
- 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count
>= (HOST_WIDE_INT
)prec
)
539 else if ((prec
- count
) >= 2 * HOST_BITS_PER_WIDE_INT
)
541 else if ((prec
- count
) >= HOST_BITS_PER_WIDE_INT
)
543 *hv
&= ~((HOST_WIDE_INT
) (-1) << (prec
- count
- HOST_BITS_PER_WIDE_INT
));
544 *hv
|= signmask
<< (prec
- count
- HOST_BITS_PER_WIDE_INT
);
549 *lv
&= ~((unsigned HOST_WIDE_INT
) (-1) << (prec
- count
));
550 *lv
|= signmask
<< (prec
- count
);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 lrotate_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
561 HOST_WIDE_INT count
, unsigned int prec
,
562 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
564 unsigned HOST_WIDE_INT s1l
, s2l
;
565 HOST_WIDE_INT s1h
, s2h
;
571 lshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
572 rshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 rrotate_double (unsigned HOST_WIDE_INT l1
, HOST_WIDE_INT h1
,
583 HOST_WIDE_INT count
, unsigned int prec
,
584 unsigned HOST_WIDE_INT
*lv
, HOST_WIDE_INT
*hv
)
586 unsigned HOST_WIDE_INT s1l
, s2l
;
587 HOST_WIDE_INT s1h
, s2h
;
593 rshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
594 lshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
609 div_and_round_double (enum tree_code code
, int uns
,
610 unsigned HOST_WIDE_INT lnum_orig
, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig
,
612 unsigned HOST_WIDE_INT lden_orig
, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig
,
614 unsigned HOST_WIDE_INT
*lquo
,
615 HOST_WIDE_INT
*hquo
, unsigned HOST_WIDE_INT
*lrem
,
619 HOST_WIDE_INT num
[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den
[4], quo
[4];
622 unsigned HOST_WIDE_INT work
;
623 unsigned HOST_WIDE_INT carry
= 0;
624 unsigned HOST_WIDE_INT lnum
= lnum_orig
;
625 HOST_WIDE_INT hnum
= hnum_orig
;
626 unsigned HOST_WIDE_INT lden
= lden_orig
;
627 HOST_WIDE_INT hden
= hden_orig
;
630 if (hden
== 0 && lden
== 0)
631 overflow
= 1, lden
= 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum
, hnum
, &lnum
, &hnum
)
641 && ((HOST_WIDE_INT
) lden
& hden
) == -1)
647 neg_double (lden
, hden
, &lden
, &hden
);
651 if (hnum
== 0 && hden
== 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo
, 0, sizeof quo
);
670 memset (num
, 0, sizeof num
); /* to zero 9th element */
671 memset (den
, 0, sizeof den
);
673 encode (num
, lnum
, hnum
);
674 encode (den
, lden
, hden
);
676 /* Special code for when the divisor < BASE. */
677 if (hden
== 0 && lden
< (unsigned HOST_WIDE_INT
) BASE
)
679 /* hnum != 0 already checked. */
680 for (i
= 4 - 1; i
>= 0; i
--)
682 work
= num
[i
] + carry
* BASE
;
683 quo
[i
] = work
/ lden
;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig
, den_hi_sig
;
692 unsigned HOST_WIDE_INT quo_est
, scale
;
694 /* Find the highest nonzero divisor digit. */
695 for (i
= 4 - 1;; i
--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale
= BASE
/ (den
[den_hi_sig
] + 1);
707 { /* scale divisor and dividend */
709 for (i
= 0; i
<= 4 - 1; i
++)
711 work
= (num
[i
] * scale
) + carry
;
712 num
[i
] = LOWPART (work
);
713 carry
= HIGHPART (work
);
718 for (i
= 0; i
<= 4 - 1; i
++)
720 work
= (den
[i
] * scale
) + carry
;
721 den
[i
] = LOWPART (work
);
722 carry
= HIGHPART (work
);
723 if (den
[i
] != 0) den_hi_sig
= i
;
730 for (i
= num_hi_sig
- den_hi_sig
- 1; i
>= 0; i
--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp
;
737 num_hi_sig
= i
+ den_hi_sig
+ 1;
738 work
= num
[num_hi_sig
] * BASE
+ num
[num_hi_sig
- 1];
739 if (num
[num_hi_sig
] != den
[den_hi_sig
])
740 quo_est
= work
/ den
[den_hi_sig
];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp
= work
- quo_est
* den
[den_hi_sig
];
747 && (den
[den_hi_sig
- 1] * quo_est
748 > (tmp
* BASE
+ num
[num_hi_sig
- 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j
= 0; j
<= den_hi_sig
; j
++)
758 work
= quo_est
* den
[j
] + carry
;
759 carry
= HIGHPART (work
);
760 work
= num
[i
+ j
] - LOWPART (work
);
761 num
[i
+ j
] = LOWPART (work
);
762 carry
+= HIGHPART (work
) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num
[num_hi_sig
] < (HOST_WIDE_INT
) carry
)
770 carry
= 0; /* add divisor back in */
771 for (j
= 0; j
<= den_hi_sig
; j
++)
773 work
= num
[i
+ j
] + den
[j
] + carry
;
774 carry
= HIGHPART (work
);
775 num
[i
+ j
] = LOWPART (work
);
778 num
[num_hi_sig
] += carry
;
781 /* Store the quotient digit. */
786 decode (quo
, lquo
, hquo
);
789 /* If result is negative, make it so. */
791 neg_double (*lquo
, *hquo
, lquo
, hquo
);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
795 neg_double (*lrem
, *hrem
, lrem
, hrem
);
796 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
801 case TRUNC_MOD_EXPR
: /* round toward zero */
802 case EXACT_DIV_EXPR
: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR
: /* round toward negative infinity */
807 if (quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1,
818 case CEIL_MOD_EXPR
: /* round toward positive infinity */
819 if (!quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
829 case ROUND_MOD_EXPR
: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem
= *lrem
;
832 HOST_WIDE_INT habs_rem
= *hrem
;
833 unsigned HOST_WIDE_INT labs_den
= lden
, ltwice
;
834 HOST_WIDE_INT habs_den
= hden
, htwice
;
836 /* Get absolute values. */
838 neg_double (*lrem
, *hrem
, &labs_rem
, &habs_rem
);
840 neg_double (lden
, hden
, &labs_den
, &habs_den
);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT
) 2, (HOST_WIDE_INT
) 0,
844 labs_rem
, habs_rem
, <wice
, &htwice
);
846 if (((unsigned HOST_WIDE_INT
) habs_den
847 < (unsigned HOST_WIDE_INT
) htwice
)
848 || (((unsigned HOST_WIDE_INT
) habs_den
849 == (unsigned HOST_WIDE_INT
) htwice
)
850 && (labs_den
< ltwice
)))
854 add_double (*lquo
, *hquo
,
855 (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1, lquo
, hquo
);
858 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
872 neg_double (*lrem
, *hrem
, lrem
, hrem
);
873 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
882 div_if_zero_remainder (enum tree_code code
, tree arg1
, tree arg2
)
884 unsigned HOST_WIDE_INT int1l
, int2l
;
885 HOST_WIDE_INT int1h
, int2h
;
886 unsigned HOST_WIDE_INT quol
, reml
;
887 HOST_WIDE_INT quoh
, remh
;
888 tree type
= TREE_TYPE (arg1
);
889 int uns
= TYPE_UNSIGNED (type
);
891 int1l
= TREE_INT_CST_LOW (arg1
);
892 int1h
= TREE_INT_CST_HIGH (arg1
);
893 int2l
= TREE_INT_CST_LOW (arg2
);
894 int2h
= TREE_INT_CST_HIGH (arg2
);
896 div_and_round_double (code
, uns
, int1l
, int1h
, int2l
, int2h
,
897 &quol
, &quoh
, &reml
, &remh
);
898 if (remh
!= 0 || reml
!= 0)
901 return build_int_cst_wide (type
, quol
, quoh
);
904 /* Return true if the built-in mathematical function specified by CODE
905 is odd, i.e. -f(x) == f(-x). */
908 negate_mathfn_p (enum built_in_function code
)
912 CASE_FLT_FN (BUILT_IN_ASIN
):
913 CASE_FLT_FN (BUILT_IN_ASINH
):
914 CASE_FLT_FN (BUILT_IN_ATAN
):
915 CASE_FLT_FN (BUILT_IN_ATANH
):
916 CASE_FLT_FN (BUILT_IN_CBRT
):
917 CASE_FLT_FN (BUILT_IN_ERF
):
918 CASE_FLT_FN (BUILT_IN_LLROUND
):
919 CASE_FLT_FN (BUILT_IN_LROUND
):
920 CASE_FLT_FN (BUILT_IN_ROUND
):
921 CASE_FLT_FN (BUILT_IN_SIN
):
922 CASE_FLT_FN (BUILT_IN_SINH
):
923 CASE_FLT_FN (BUILT_IN_TAN
):
924 CASE_FLT_FN (BUILT_IN_TANH
):
925 CASE_FLT_FN (BUILT_IN_TRUNC
):
928 CASE_FLT_FN (BUILT_IN_LLRINT
):
929 CASE_FLT_FN (BUILT_IN_LRINT
):
930 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
931 CASE_FLT_FN (BUILT_IN_RINT
):
932 return !flag_rounding_math
;
940 /* Check whether we may negate an integer constant T without causing
944 may_negate_without_overflow_p (tree t
)
946 unsigned HOST_WIDE_INT val
;
950 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
952 type
= TREE_TYPE (t
);
953 if (TYPE_UNSIGNED (type
))
956 prec
= TYPE_PRECISION (type
);
957 if (prec
> HOST_BITS_PER_WIDE_INT
)
959 if (TREE_INT_CST_LOW (t
) != 0)
961 prec
-= HOST_BITS_PER_WIDE_INT
;
962 val
= TREE_INT_CST_HIGH (t
);
965 val
= TREE_INT_CST_LOW (t
);
966 if (prec
< HOST_BITS_PER_WIDE_INT
)
967 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
968 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
971 /* Determine whether an expression T can be cheaply negated using
972 the function negate_expr without introducing undefined overflow. */
975 negate_expr_p (tree t
)
982 type
= TREE_TYPE (t
);
985 switch (TREE_CODE (t
))
988 if (TYPE_UNSIGNED (type
)
989 || (flag_wrapv
&& ! flag_trapv
))
992 /* Check that -CST will not overflow type. */
993 return may_negate_without_overflow_p (t
);
995 return INTEGRAL_TYPE_P (type
)
996 && (TYPE_UNSIGNED (type
)
997 || (flag_wrapv
&& !flag_trapv
));
1004 return negate_expr_p (TREE_REALPART (t
))
1005 && negate_expr_p (TREE_IMAGPART (t
));
1008 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1009 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
1011 /* -(A + B) -> (-B) - A. */
1012 if (negate_expr_p (TREE_OPERAND (t
, 1))
1013 && reorder_operands_p (TREE_OPERAND (t
, 0),
1014 TREE_OPERAND (t
, 1)))
1016 /* -(A + B) -> (-A) - B. */
1017 return negate_expr_p (TREE_OPERAND (t
, 0));
1020 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1021 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1022 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
1023 && reorder_operands_p (TREE_OPERAND (t
, 0),
1024 TREE_OPERAND (t
, 1));
1027 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
1033 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
1034 return negate_expr_p (TREE_OPERAND (t
, 1))
1035 || negate_expr_p (TREE_OPERAND (t
, 0));
1038 case TRUNC_DIV_EXPR
:
1039 case ROUND_DIV_EXPR
:
1040 case FLOOR_DIV_EXPR
:
1042 case EXACT_DIV_EXPR
:
1043 if (TYPE_UNSIGNED (TREE_TYPE (t
)) || flag_wrapv
)
1045 return negate_expr_p (TREE_OPERAND (t
, 1))
1046 || negate_expr_p (TREE_OPERAND (t
, 0));
1049 /* Negate -((double)float) as (double)(-float). */
1050 if (TREE_CODE (type
) == REAL_TYPE
)
1052 tree tem
= strip_float_extensions (t
);
1054 return negate_expr_p (tem
);
1059 /* Negate -f(x) as f(-x). */
1060 if (negate_mathfn_p (builtin_mathfn_code (t
)))
1061 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t
, 1)));
1065 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1066 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
1068 tree op1
= TREE_OPERAND (t
, 1);
1069 if (TREE_INT_CST_HIGH (op1
) == 0
1070 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
1071 == TREE_INT_CST_LOW (op1
))
1082 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1083 simplification is possible.
1084 If negate_expr_p would return true for T, NULL_TREE will never be
1088 fold_negate_expr (tree t
)
1090 tree type
= TREE_TYPE (t
);
1093 switch (TREE_CODE (t
))
1095 /* Convert - (~A) to A + 1. */
1097 if (INTEGRAL_TYPE_P (type
))
1098 return fold_build2 (PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
1099 build_int_cst (type
, 1));
1103 tem
= fold_negate_const (t
, type
);
1104 if (!TREE_OVERFLOW (tem
)
1105 || TYPE_UNSIGNED (type
)
1111 tem
= fold_negate_const (t
, type
);
1112 /* Two's complement FP formats, such as c4x, may overflow. */
1113 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
1119 tree rpart
= negate_expr (TREE_REALPART (t
));
1120 tree ipart
= negate_expr (TREE_IMAGPART (t
));
1122 if ((TREE_CODE (rpart
) == REAL_CST
1123 && TREE_CODE (ipart
) == REAL_CST
)
1124 || (TREE_CODE (rpart
) == INTEGER_CST
1125 && TREE_CODE (ipart
) == INTEGER_CST
))
1126 return build_complex (type
, rpart
, ipart
);
1131 return TREE_OPERAND (t
, 0);
1134 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1135 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
1137 /* -(A + B) -> (-B) - A. */
1138 if (negate_expr_p (TREE_OPERAND (t
, 1))
1139 && reorder_operands_p (TREE_OPERAND (t
, 0),
1140 TREE_OPERAND (t
, 1)))
1142 tem
= negate_expr (TREE_OPERAND (t
, 1));
1143 return fold_build2 (MINUS_EXPR
, type
,
1144 tem
, TREE_OPERAND (t
, 0));
1147 /* -(A + B) -> (-A) - B. */
1148 if (negate_expr_p (TREE_OPERAND (t
, 0)))
1150 tem
= negate_expr (TREE_OPERAND (t
, 0));
1151 return fold_build2 (MINUS_EXPR
, type
,
1152 tem
, TREE_OPERAND (t
, 1));
1158 /* - (A - B) -> B - A */
1159 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
1160 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
1161 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
1162 return fold_build2 (MINUS_EXPR
, type
,
1163 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
1167 if (TYPE_UNSIGNED (type
))
1173 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
1175 tem
= TREE_OPERAND (t
, 1);
1176 if (negate_expr_p (tem
))
1177 return fold_build2 (TREE_CODE (t
), type
,
1178 TREE_OPERAND (t
, 0), negate_expr (tem
));
1179 tem
= TREE_OPERAND (t
, 0);
1180 if (negate_expr_p (tem
))
1181 return fold_build2 (TREE_CODE (t
), type
,
1182 negate_expr (tem
), TREE_OPERAND (t
, 1));
1186 case TRUNC_DIV_EXPR
:
1187 case ROUND_DIV_EXPR
:
1188 case FLOOR_DIV_EXPR
:
1190 case EXACT_DIV_EXPR
:
1191 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
1193 tem
= TREE_OPERAND (t
, 1);
1194 if (negate_expr_p (tem
))
1195 return fold_build2 (TREE_CODE (t
), type
,
1196 TREE_OPERAND (t
, 0), negate_expr (tem
));
1197 tem
= TREE_OPERAND (t
, 0);
1198 if (negate_expr_p (tem
))
1199 return fold_build2 (TREE_CODE (t
), type
,
1200 negate_expr (tem
), TREE_OPERAND (t
, 1));
1205 /* Convert -((double)float) into (double)(-float). */
1206 if (TREE_CODE (type
) == REAL_TYPE
)
1208 tem
= strip_float_extensions (t
);
1209 if (tem
!= t
&& negate_expr_p (tem
))
1210 return negate_expr (tem
);
1215 /* Negate -f(x) as f(-x). */
1216 if (negate_mathfn_p (builtin_mathfn_code (t
))
1217 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t
, 1))))
1219 tree fndecl
, arg
, arglist
;
1221 fndecl
= get_callee_fndecl (t
);
1222 arg
= negate_expr (TREE_VALUE (TREE_OPERAND (t
, 1)));
1223 arglist
= build_tree_list (NULL_TREE
, arg
);
1224 return build_function_call_expr (fndecl
, arglist
);
1229 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1230 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
1232 tree op1
= TREE_OPERAND (t
, 1);
1233 if (TREE_INT_CST_HIGH (op1
) == 0
1234 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
1235 == TREE_INT_CST_LOW (op1
))
1237 tree ntype
= TYPE_UNSIGNED (type
)
1238 ? lang_hooks
.types
.signed_type (type
)
1239 : lang_hooks
.types
.unsigned_type (type
);
1240 tree temp
= fold_convert (ntype
, TREE_OPERAND (t
, 0));
1241 temp
= fold_build2 (RSHIFT_EXPR
, ntype
, temp
, op1
);
1242 return fold_convert (type
, temp
);
1254 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1255 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1256 return NULL_TREE. */
1259 negate_expr (tree t
)
1266 type
= TREE_TYPE (t
);
1267 STRIP_SIGN_NOPS (t
);
1269 tem
= fold_negate_expr (t
);
1271 tem
= build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
1272 return fold_convert (type
, tem
);
1275 /* Split a tree IN into a constant, literal and variable parts that could be
1276 combined with CODE to make IN. "constant" means an expression with
1277 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1278 commutative arithmetic operation. Store the constant part into *CONP,
1279 the literal in *LITP and return the variable part. If a part isn't
1280 present, set it to null. If the tree does not decompose in this way,
1281 return the entire tree as the variable part and the other parts as null.
1283 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1284 case, we negate an operand that was subtracted. Except if it is a
1285 literal for which we use *MINUS_LITP instead.
1287 If NEGATE_P is true, we are negating all of IN, again except a literal
1288 for which we use *MINUS_LITP instead.
1290 If IN is itself a literal or constant, return it as appropriate.
1292 Note that we do not guarantee that any of the three values will be the
1293 same type as IN, but they will have the same signedness and mode. */
1296 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
1297 tree
*minus_litp
, int negate_p
)
1305 /* Strip any conversions that don't change the machine mode or signedness. */
1306 STRIP_SIGN_NOPS (in
);
1308 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
)
1310 else if (TREE_CODE (in
) == code
1311 || (! FLOAT_TYPE_P (TREE_TYPE (in
))
1312 /* We can associate addition and subtraction together (even
1313 though the C standard doesn't say so) for integers because
1314 the value is not affected. For reals, the value might be
1315 affected, so we can't. */
1316 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
1317 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
1319 tree op0
= TREE_OPERAND (in
, 0);
1320 tree op1
= TREE_OPERAND (in
, 1);
1321 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
1322 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
1324 /* First see if either of the operands is a literal, then a constant. */
1325 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
)
1326 *litp
= op0
, op0
= 0;
1327 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
)
1328 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
1330 if (op0
!= 0 && TREE_CONSTANT (op0
))
1331 *conp
= op0
, op0
= 0;
1332 else if (op1
!= 0 && TREE_CONSTANT (op1
))
1333 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
1335 /* If we haven't dealt with either operand, this is not a case we can
1336 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1337 if (op0
!= 0 && op1
!= 0)
1342 var
= op1
, neg_var_p
= neg1_p
;
1344 /* Now do any needed negations. */
1346 *minus_litp
= *litp
, *litp
= 0;
1348 *conp
= negate_expr (*conp
);
1350 var
= negate_expr (var
);
1352 else if (TREE_CONSTANT (in
))
1360 *minus_litp
= *litp
, *litp
= 0;
1361 else if (*minus_litp
)
1362 *litp
= *minus_litp
, *minus_litp
= 0;
1363 *conp
= negate_expr (*conp
);
1364 var
= negate_expr (var
);
1370 /* Re-associate trees split by the above function. T1 and T2 are either
1371 expressions to associate or null. Return the new expression, if any. If
1372 we build an operation, do it in TYPE and with CODE. */
1375 associate_trees (tree t1
, tree t2
, enum tree_code code
, tree type
)
1382 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1383 try to fold this since we will have infinite recursion. But do
1384 deal with any NEGATE_EXPRs. */
1385 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
1386 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
1388 if (code
== PLUS_EXPR
)
1390 if (TREE_CODE (t1
) == NEGATE_EXPR
)
1391 return build2 (MINUS_EXPR
, type
, fold_convert (type
, t2
),
1392 fold_convert (type
, TREE_OPERAND (t1
, 0)));
1393 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
1394 return build2 (MINUS_EXPR
, type
, fold_convert (type
, t1
),
1395 fold_convert (type
, TREE_OPERAND (t2
, 0)));
1396 else if (integer_zerop (t2
))
1397 return fold_convert (type
, t1
);
1399 else if (code
== MINUS_EXPR
)
1401 if (integer_zerop (t2
))
1402 return fold_convert (type
, t1
);
1405 return build2 (code
, type
, fold_convert (type
, t1
),
1406 fold_convert (type
, t2
));
1409 return fold_build2 (code
, type
, fold_convert (type
, t1
),
1410 fold_convert (type
, t2
));
1413 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1414 for use in int_const_binop, size_binop and size_diffop. */
1417 int_binop_types_match_p (enum tree_code code
, tree type1
, tree type2
)
1419 if (TREE_CODE (type1
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type1
))
1421 if (TREE_CODE (type2
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type2
))
1436 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
1437 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
1438 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
1442 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1443 to produce a new constant. Return NULL_TREE if we don't know how
1444 to evaluate CODE at compile-time.
1446 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1449 int_const_binop (enum tree_code code
, tree arg1
, tree arg2
, int notrunc
)
1451 unsigned HOST_WIDE_INT int1l
, int2l
;
1452 HOST_WIDE_INT int1h
, int2h
;
1453 unsigned HOST_WIDE_INT low
;
1455 unsigned HOST_WIDE_INT garbagel
;
1456 HOST_WIDE_INT garbageh
;
1458 tree type
= TREE_TYPE (arg1
);
1459 int uns
= TYPE_UNSIGNED (type
);
1461 = (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
));
1464 int1l
= TREE_INT_CST_LOW (arg1
);
1465 int1h
= TREE_INT_CST_HIGH (arg1
);
1466 int2l
= TREE_INT_CST_LOW (arg2
);
1467 int2h
= TREE_INT_CST_HIGH (arg2
);
1472 low
= int1l
| int2l
, hi
= int1h
| int2h
;
1476 low
= int1l
^ int2l
, hi
= int1h
^ int2h
;
1480 low
= int1l
& int2l
, hi
= int1h
& int2h
;
1486 /* It's unclear from the C standard whether shifts can overflow.
1487 The following code ignores overflow; perhaps a C standard
1488 interpretation ruling is needed. */
1489 lshift_double (int1l
, int1h
, int2l
, TYPE_PRECISION (type
),
1496 lrotate_double (int1l
, int1h
, int2l
, TYPE_PRECISION (type
),
1501 overflow
= add_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1505 neg_double (int2l
, int2h
, &low
, &hi
);
1506 add_double (int1l
, int1h
, low
, hi
, &low
, &hi
);
1507 overflow
= OVERFLOW_SUM_SIGN (hi
, int2h
, int1h
);
1511 overflow
= mul_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1514 case TRUNC_DIV_EXPR
:
1515 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1516 case EXACT_DIV_EXPR
:
1517 /* This is a shortcut for a common special case. */
1518 if (int2h
== 0 && (HOST_WIDE_INT
) int2l
> 0
1519 && !TREE_OVERFLOW (arg1
)
1520 && !TREE_OVERFLOW (arg2
)
1521 && int1h
== 0 && (HOST_WIDE_INT
) int1l
>= 0)
1523 if (code
== CEIL_DIV_EXPR
)
1526 low
= int1l
/ int2l
, hi
= 0;
1530 /* ... fall through ... */
1532 case ROUND_DIV_EXPR
:
1533 if (int2h
== 0 && int2l
== 0)
1535 if (int2h
== 0 && int2l
== 1)
1537 low
= int1l
, hi
= int1h
;
1540 if (int1l
== int2l
&& int1h
== int2h
1541 && ! (int1l
== 0 && int1h
== 0))
1546 overflow
= div_and_round_double (code
, uns
, int1l
, int1h
, int2l
, int2h
,
1547 &low
, &hi
, &garbagel
, &garbageh
);
1550 case TRUNC_MOD_EXPR
:
1551 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1552 /* This is a shortcut for a common special case. */
1553 if (int2h
== 0 && (HOST_WIDE_INT
) int2l
> 0
1554 && !TREE_OVERFLOW (arg1
)
1555 && !TREE_OVERFLOW (arg2
)
1556 && int1h
== 0 && (HOST_WIDE_INT
) int1l
>= 0)
1558 if (code
== CEIL_MOD_EXPR
)
1560 low
= int1l
% int2l
, hi
= 0;
1564 /* ... fall through ... */
1566 case ROUND_MOD_EXPR
:
1567 if (int2h
== 0 && int2l
== 0)
1569 overflow
= div_and_round_double (code
, uns
,
1570 int1l
, int1h
, int2l
, int2h
,
1571 &garbagel
, &garbageh
, &low
, &hi
);
1577 low
= (((unsigned HOST_WIDE_INT
) int1h
1578 < (unsigned HOST_WIDE_INT
) int2h
)
1579 || (((unsigned HOST_WIDE_INT
) int1h
1580 == (unsigned HOST_WIDE_INT
) int2h
)
1583 low
= (int1h
< int2h
1584 || (int1h
== int2h
&& int1l
< int2l
));
1586 if (low
== (code
== MIN_EXPR
))
1587 low
= int1l
, hi
= int1h
;
1589 low
= int2l
, hi
= int2h
;
1598 t
= build_int_cst_wide (TREE_TYPE (arg1
), low
, hi
);
1600 /* Propagate overflow flags ourselves. */
1601 if (((!uns
|| is_sizetype
) && overflow
)
1602 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1605 TREE_OVERFLOW (t
) = 1;
1609 t
= force_fit_type_double (TREE_TYPE (arg1
), low
, hi
, 1,
1610 ((!uns
|| is_sizetype
) && overflow
)
1611 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1616 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1617 constant. We assume ARG1 and ARG2 have the same data type, or at least
1618 are the same kind of constant and the same machine mode. Return zero if
1619 combining the constants is not allowed in the current operating mode.
1621 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1624 const_binop (enum tree_code code
, tree arg1
, tree arg2
, int notrunc
)
1626 /* Sanity check for the recursive cases. */
1633 if (TREE_CODE (arg1
) == INTEGER_CST
)
1634 return int_const_binop (code
, arg1
, arg2
, notrunc
);
1636 if (TREE_CODE (arg1
) == REAL_CST
)
1638 enum machine_mode mode
;
1641 REAL_VALUE_TYPE value
;
1642 REAL_VALUE_TYPE result
;
1646 /* The following codes are handled by real_arithmetic. */
1661 d1
= TREE_REAL_CST (arg1
);
1662 d2
= TREE_REAL_CST (arg2
);
1664 type
= TREE_TYPE (arg1
);
1665 mode
= TYPE_MODE (type
);
1667 /* Don't perform operation if we honor signaling NaNs and
1668 either operand is a NaN. */
1669 if (HONOR_SNANS (mode
)
1670 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1673 /* Don't perform operation if it would raise a division
1674 by zero exception. */
1675 if (code
== RDIV_EXPR
1676 && REAL_VALUES_EQUAL (d2
, dconst0
)
1677 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1680 /* If either operand is a NaN, just return it. Otherwise, set up
1681 for floating-point trap; we return an overflow. */
1682 if (REAL_VALUE_ISNAN (d1
))
1684 else if (REAL_VALUE_ISNAN (d2
))
1687 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1688 real_convert (&result
, mode
, &value
);
1690 /* Don't constant fold this floating point operation if
1691 the result has overflowed and flag_trapping_math. */
1692 if (flag_trapping_math
1693 && MODE_HAS_INFINITIES (mode
)
1694 && REAL_VALUE_ISINF (result
)
1695 && !REAL_VALUE_ISINF (d1
)
1696 && !REAL_VALUE_ISINF (d2
))
1699 /* Don't constant fold this floating point operation if the
1700 result may dependent upon the run-time rounding mode and
1701 flag_rounding_math is set, or if GCC's software emulation
1702 is unable to accurately represent the result. */
1703 if ((flag_rounding_math
1704 || (REAL_MODE_FORMAT_COMPOSITE_P (mode
)
1705 && !flag_unsafe_math_optimizations
))
1706 && (inexact
|| !real_identical (&result
, &value
)))
1709 t
= build_real (type
, result
);
1711 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1715 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1717 tree type
= TREE_TYPE (arg1
);
1718 tree r1
= TREE_REALPART (arg1
);
1719 tree i1
= TREE_IMAGPART (arg1
);
1720 tree r2
= TREE_REALPART (arg2
);
1721 tree i2
= TREE_IMAGPART (arg2
);
1728 real
= const_binop (code
, r1
, r2
, notrunc
);
1729 imag
= const_binop (code
, i1
, i2
, notrunc
);
1733 real
= const_binop (MINUS_EXPR
,
1734 const_binop (MULT_EXPR
, r1
, r2
, notrunc
),
1735 const_binop (MULT_EXPR
, i1
, i2
, notrunc
),
1737 imag
= const_binop (PLUS_EXPR
,
1738 const_binop (MULT_EXPR
, r1
, i2
, notrunc
),
1739 const_binop (MULT_EXPR
, i1
, r2
, notrunc
),
1746 = const_binop (PLUS_EXPR
,
1747 const_binop (MULT_EXPR
, r2
, r2
, notrunc
),
1748 const_binop (MULT_EXPR
, i2
, i2
, notrunc
),
1751 = const_binop (PLUS_EXPR
,
1752 const_binop (MULT_EXPR
, r1
, r2
, notrunc
),
1753 const_binop (MULT_EXPR
, i1
, i2
, notrunc
),
1756 = const_binop (MINUS_EXPR
,
1757 const_binop (MULT_EXPR
, i1
, r2
, notrunc
),
1758 const_binop (MULT_EXPR
, r1
, i2
, notrunc
),
1761 if (INTEGRAL_TYPE_P (TREE_TYPE (r1
)))
1762 code
= TRUNC_DIV_EXPR
;
1764 real
= const_binop (code
, t1
, magsquared
, notrunc
);
1765 imag
= const_binop (code
, t2
, magsquared
, notrunc
);
1774 return build_complex (type
, real
, imag
);
1780 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1781 indicates which particular sizetype to create. */
1784 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1786 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1789 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1790 is a tree code. The type of the result is taken from the operands.
1791 Both must be equivalent integer types, ala int_binop_types_match_p.
1792 If the operands are constant, so is the result. */
1795 size_binop (enum tree_code code
, tree arg0
, tree arg1
)
1797 tree type
= TREE_TYPE (arg0
);
1799 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1800 return error_mark_node
;
1802 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1805 /* Handle the special case of two integer constants faster. */
1806 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1808 /* And some specific cases even faster than that. */
1809 if (code
== PLUS_EXPR
&& integer_zerop (arg0
))
1811 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
1812 && integer_zerop (arg1
))
1814 else if (code
== MULT_EXPR
&& integer_onep (arg0
))
1817 /* Handle general case of two integer constants. */
1818 return int_const_binop (code
, arg0
, arg1
, 0);
1821 return fold_build2 (code
, type
, arg0
, arg1
);
1824 /* Given two values, either both of sizetype or both of bitsizetype,
1825 compute the difference between the two values. Return the value
1826 in signed type corresponding to the type of the operands. */
1829 size_diffop (tree arg0
, tree arg1
)
1831 tree type
= TREE_TYPE (arg0
);
1834 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1837 /* If the type is already signed, just do the simple thing. */
1838 if (!TYPE_UNSIGNED (type
))
1839 return size_binop (MINUS_EXPR
, arg0
, arg1
);
1841 if (type
== sizetype
)
1843 else if (type
== bitsizetype
)
1844 ctype
= sbitsizetype
;
1846 ctype
= lang_hooks
.types
.signed_type (type
);
1848 /* If either operand is not a constant, do the conversions to the signed
1849 type and subtract. The hardware will do the right thing with any
1850 overflow in the subtraction. */
1851 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1852 return size_binop (MINUS_EXPR
, fold_convert (ctype
, arg0
),
1853 fold_convert (ctype
, arg1
));
1855 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1856 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1857 overflow) and negate (which can't either). Special-case a result
1858 of zero while we're here. */
1859 if (tree_int_cst_equal (arg0
, arg1
))
1860 return build_int_cst (ctype
, 0);
1861 else if (tree_int_cst_lt (arg1
, arg0
))
1862 return fold_convert (ctype
, size_binop (MINUS_EXPR
, arg0
, arg1
));
1864 return size_binop (MINUS_EXPR
, build_int_cst (ctype
, 0),
1865 fold_convert (ctype
, size_binop (MINUS_EXPR
,
1869 /* A subroutine of fold_convert_const handling conversions of an
1870 INTEGER_CST to another integer type. */
1873 fold_convert_const_int_from_int (tree type
, tree arg1
)
1877 /* Given an integer constant, make new constant with new type,
1878 appropriately sign-extended or truncated. */
1879 t
= force_fit_type_double (type
, TREE_INT_CST_LOW (arg1
),
1880 TREE_INT_CST_HIGH (arg1
),
1881 /* Don't set the overflow when
1882 converting a pointer */
1883 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1884 (TREE_INT_CST_HIGH (arg1
) < 0
1885 && (TYPE_UNSIGNED (type
)
1886 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1887 | TREE_OVERFLOW (arg1
));
1892 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1893 to an integer type. */
1896 fold_convert_const_int_from_real (enum tree_code code
, tree type
, tree arg1
)
1901 /* The following code implements the floating point to integer
1902 conversion rules required by the Java Language Specification,
1903 that IEEE NaNs are mapped to zero and values that overflow
1904 the target precision saturate, i.e. values greater than
1905 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1906 are mapped to INT_MIN. These semantics are allowed by the
1907 C and C++ standards that simply state that the behavior of
1908 FP-to-integer conversion is unspecified upon overflow. */
1910 HOST_WIDE_INT high
, low
;
1912 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1916 case FIX_TRUNC_EXPR
:
1917 real_trunc (&r
, VOIDmode
, &x
);
1924 /* If R is NaN, return zero and show we have an overflow. */
1925 if (REAL_VALUE_ISNAN (r
))
1932 /* See if R is less than the lower bound or greater than the
1937 tree lt
= TYPE_MIN_VALUE (type
);
1938 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1939 if (REAL_VALUES_LESS (r
, l
))
1942 high
= TREE_INT_CST_HIGH (lt
);
1943 low
= TREE_INT_CST_LOW (lt
);
1949 tree ut
= TYPE_MAX_VALUE (type
);
1952 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1953 if (REAL_VALUES_LESS (u
, r
))
1956 high
= TREE_INT_CST_HIGH (ut
);
1957 low
= TREE_INT_CST_LOW (ut
);
1963 REAL_VALUE_TO_INT (&low
, &high
, r
);
1965 t
= force_fit_type_double (type
, low
, high
, -1,
1966 overflow
| TREE_OVERFLOW (arg1
));
1970 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1971 to another floating point type. */
1974 fold_convert_const_real_from_real (tree type
, tree arg1
)
1976 REAL_VALUE_TYPE value
;
1979 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1980 t
= build_real (type
, value
);
1982 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1986 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1987 type TYPE. If no simplification can be done return NULL_TREE. */
1990 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1992 if (TREE_TYPE (arg1
) == type
)
1995 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
1997 if (TREE_CODE (arg1
) == INTEGER_CST
)
1998 return fold_convert_const_int_from_int (type
, arg1
);
1999 else if (TREE_CODE (arg1
) == REAL_CST
)
2000 return fold_convert_const_int_from_real (code
, type
, arg1
);
2002 else if (TREE_CODE (type
) == REAL_TYPE
)
2004 if (TREE_CODE (arg1
) == INTEGER_CST
)
2005 return build_real_from_int_cst (type
, arg1
);
2006 if (TREE_CODE (arg1
) == REAL_CST
)
2007 return fold_convert_const_real_from_real (type
, arg1
);
2012 /* Construct a vector of zero elements of vector type TYPE. */
2015 build_zero_vector (tree type
)
2020 elem
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2021 units
= TYPE_VECTOR_SUBPARTS (type
);
2024 for (i
= 0; i
< units
; i
++)
2025 list
= tree_cons (NULL_TREE
, elem
, list
);
2026 return build_vector (type
, list
);
2029 /* Convert expression ARG to type TYPE. Used by the middle-end for
2030 simple conversions in preference to calling the front-end's convert. */
2033 fold_convert (tree type
, tree arg
)
2035 tree orig
= TREE_TYPE (arg
);
2041 if (TREE_CODE (arg
) == ERROR_MARK
2042 || TREE_CODE (type
) == ERROR_MARK
2043 || TREE_CODE (orig
) == ERROR_MARK
)
2044 return error_mark_node
;
2046 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
)
2047 || lang_hooks
.types_compatible_p (TYPE_MAIN_VARIANT (type
),
2048 TYPE_MAIN_VARIANT (orig
)))
2049 return fold_build1 (NOP_EXPR
, type
, arg
);
2051 switch (TREE_CODE (type
))
2053 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2054 case POINTER_TYPE
: case REFERENCE_TYPE
:
2056 if (TREE_CODE (arg
) == INTEGER_CST
)
2058 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2059 if (tem
!= NULL_TREE
)
2062 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2063 || TREE_CODE (orig
) == OFFSET_TYPE
)
2064 return fold_build1 (NOP_EXPR
, type
, arg
);
2065 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2067 tem
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2068 return fold_convert (type
, tem
);
2070 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2071 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2072 return fold_build1 (NOP_EXPR
, type
, arg
);
2075 if (TREE_CODE (arg
) == INTEGER_CST
)
2077 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2078 if (tem
!= NULL_TREE
)
2081 else if (TREE_CODE (arg
) == REAL_CST
)
2083 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2084 if (tem
!= NULL_TREE
)
2088 switch (TREE_CODE (orig
))
2091 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2092 case POINTER_TYPE
: case REFERENCE_TYPE
:
2093 return fold_build1 (FLOAT_EXPR
, type
, arg
);
2096 return fold_build1 (NOP_EXPR
, type
, arg
);
2099 tem
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2100 return fold_convert (type
, tem
);
2107 switch (TREE_CODE (orig
))
2110 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2111 case POINTER_TYPE
: case REFERENCE_TYPE
:
2113 return build2 (COMPLEX_EXPR
, type
,
2114 fold_convert (TREE_TYPE (type
), arg
),
2115 fold_convert (TREE_TYPE (type
), integer_zero_node
));
2120 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2122 rpart
= fold_convert (TREE_TYPE (type
), TREE_OPERAND (arg
, 0));
2123 ipart
= fold_convert (TREE_TYPE (type
), TREE_OPERAND (arg
, 1));
2124 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, ipart
);
2127 arg
= save_expr (arg
);
2128 rpart
= fold_build1 (REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2129 ipart
= fold_build1 (IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2130 rpart
= fold_convert (TREE_TYPE (type
), rpart
);
2131 ipart
= fold_convert (TREE_TYPE (type
), ipart
);
2132 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, ipart
);
2140 if (integer_zerop (arg
))
2141 return build_zero_vector (type
);
2142 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2143 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2144 || TREE_CODE (orig
) == VECTOR_TYPE
);
2145 return fold_build1 (VIEW_CONVERT_EXPR
, type
, arg
);
2148 tem
= fold_ignored_result (arg
);
2149 if (TREE_CODE (tem
) == GIMPLE_MODIFY_STMT
)
2151 return fold_build1 (NOP_EXPR
, type
, tem
);
2158 /* Return false if expr can be assumed not to be an lvalue, true
2162 maybe_lvalue_p (tree x
)
2164 /* We only need to wrap lvalue tree codes. */
2165 switch (TREE_CODE (x
))
2176 case ALIGN_INDIRECT_REF
:
2177 case MISALIGNED_INDIRECT_REF
:
2179 case ARRAY_RANGE_REF
:
2185 case PREINCREMENT_EXPR
:
2186 case PREDECREMENT_EXPR
:
2188 case TRY_CATCH_EXPR
:
2189 case WITH_CLEANUP_EXPR
:
2192 case GIMPLE_MODIFY_STMT
:
2201 /* Assume the worst for front-end tree codes. */
2202 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2210 /* Return an expr equal to X but certainly not valid as an lvalue. */
2215 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2220 if (! maybe_lvalue_p (x
))
2222 return build1 (NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2225 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2226 Zero means allow extended lvalues. */
2228 int pedantic_lvalues
;
2230 /* When pedantic, return an expr equal to X but certainly not valid as a
2231 pedantic lvalue. Otherwise, return X. */
2234 pedantic_non_lvalue (tree x
)
2236 if (pedantic_lvalues
)
2237 return non_lvalue (x
);
2242 /* Given a tree comparison code, return the code that is the logical inverse
2243 of the given code. It is not safe to do this for floating-point
2244 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2245 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2248 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2250 if (honor_nans
&& flag_trapping_math
)
2260 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2262 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2264 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2266 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2280 return UNORDERED_EXPR
;
2281 case UNORDERED_EXPR
:
2282 return ORDERED_EXPR
;
2288 /* Similar, but return the comparison that results if the operands are
2289 swapped. This is safe for floating-point. */
2292 swap_tree_comparison (enum tree_code code
)
2299 case UNORDERED_EXPR
:
2325 /* Convert a comparison tree code from an enum tree_code representation
2326 into a compcode bit-based encoding. This function is the inverse of
2327 compcode_to_comparison. */
2329 static enum comparison_code
2330 comparison_to_compcode (enum tree_code code
)
2347 return COMPCODE_ORD
;
2348 case UNORDERED_EXPR
:
2349 return COMPCODE_UNORD
;
2351 return COMPCODE_UNLT
;
2353 return COMPCODE_UNEQ
;
2355 return COMPCODE_UNLE
;
2357 return COMPCODE_UNGT
;
2359 return COMPCODE_LTGT
;
2361 return COMPCODE_UNGE
;
2367 /* Convert a compcode bit-based encoding of a comparison operator back
2368 to GCC's enum tree_code representation. This function is the
2369 inverse of comparison_to_compcode. */
2371 static enum tree_code
2372 compcode_to_comparison (enum comparison_code code
)
2389 return ORDERED_EXPR
;
2390 case COMPCODE_UNORD
:
2391 return UNORDERED_EXPR
;
2409 /* Return a tree for the comparison which is the combination of
2410 doing the AND or OR (depending on CODE) of the two operations LCODE
2411 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2412 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2413 if this makes the transformation invalid. */
2416 combine_comparisons (enum tree_code code
, enum tree_code lcode
,
2417 enum tree_code rcode
, tree truth_type
,
2418 tree ll_arg
, tree lr_arg
)
2420 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2421 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2422 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2423 enum comparison_code compcode
;
2427 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2428 compcode
= lcompcode
& rcompcode
;
2431 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2432 compcode
= lcompcode
| rcompcode
;
2441 /* Eliminate unordered comparisons, as well as LTGT and ORD
2442 which are not used unless the mode has NaNs. */
2443 compcode
&= ~COMPCODE_UNORD
;
2444 if (compcode
== COMPCODE_LTGT
)
2445 compcode
= COMPCODE_NE
;
2446 else if (compcode
== COMPCODE_ORD
)
2447 compcode
= COMPCODE_TRUE
;
2449 else if (flag_trapping_math
)
2451 /* Check that the original operation and the optimized ones will trap
2452 under the same condition. */
2453 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2454 && (lcompcode
!= COMPCODE_EQ
)
2455 && (lcompcode
!= COMPCODE_ORD
);
2456 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2457 && (rcompcode
!= COMPCODE_EQ
)
2458 && (rcompcode
!= COMPCODE_ORD
);
2459 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2460 && (compcode
!= COMPCODE_EQ
)
2461 && (compcode
!= COMPCODE_ORD
);
2463 /* In a short-circuited boolean expression the LHS might be
2464 such that the RHS, if evaluated, will never trap. For
2465 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2466 if neither x nor y is NaN. (This is a mixed blessing: for
2467 example, the expression above will never trap, hence
2468 optimizing it to x < y would be invalid). */
2469 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2470 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2473 /* If the comparison was short-circuited, and only the RHS
2474 trapped, we may now generate a spurious trap. */
2476 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2479 /* If we changed the conditions that cause a trap, we lose. */
2480 if ((ltrap
|| rtrap
) != trap
)
2484 if (compcode
== COMPCODE_TRUE
)
2485 return constant_boolean_node (true, truth_type
);
2486 else if (compcode
== COMPCODE_FALSE
)
2487 return constant_boolean_node (false, truth_type
);
2489 return fold_build2 (compcode_to_comparison (compcode
),
2490 truth_type
, ll_arg
, lr_arg
);
2493 /* Return nonzero if CODE is a tree code that represents a truth value. */
2496 truth_value_p (enum tree_code code
)
2498 return (TREE_CODE_CLASS (code
) == tcc_comparison
2499 || code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
2500 || code
== TRUTH_OR_EXPR
|| code
== TRUTH_ORIF_EXPR
2501 || code
== TRUTH_XOR_EXPR
|| code
== TRUTH_NOT_EXPR
);
2504 /* Return nonzero if two operands (typically of the same tree node)
2505 are necessarily equal. If either argument has side-effects this
2506 function returns zero. FLAGS modifies behavior as follows:
2508 If OEP_ONLY_CONST is set, only return nonzero for constants.
2509 This function tests whether the operands are indistinguishable;
2510 it does not test whether they are equal using C's == operation.
2511 The distinction is important for IEEE floating point, because
2512 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2513 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2515 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2516 even though it may hold multiple values during a function.
2517 This is because a GCC tree node guarantees that nothing else is
2518 executed between the evaluation of its "operands" (which may often
2519 be evaluated in arbitrary order). Hence if the operands themselves
2520 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2521 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2522 unset means assuming isochronic (or instantaneous) tree equivalence.
2523 Unless comparing arbitrary expression trees, such as from different
2524 statements, this flag can usually be left unset.
2526 If OEP_PURE_SAME is set, then pure functions with identical arguments
2527 are considered the same. It is used when the caller has other ways
2528 to ensure that global memory is unchanged in between. */
2531 operand_equal_p (tree arg0
, tree arg1
, unsigned int flags
)
2533 /* If either is ERROR_MARK, they aren't equal. */
2534 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
)
2537 /* If both types don't have the same signedness, then we can't consider
2538 them equal. We must check this before the STRIP_NOPS calls
2539 because they may change the signedness of the arguments. */
2540 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2543 /* If both types don't have the same precision, then it is not safe
2545 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2551 /* In case both args are comparisons but with different comparison
2552 code, try to swap the comparison operands of one arg to produce
2553 a match and compare that variant. */
2554 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2555 && COMPARISON_CLASS_P (arg0
)
2556 && COMPARISON_CLASS_P (arg1
))
2558 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2560 if (TREE_CODE (arg0
) == swap_code
)
2561 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2562 TREE_OPERAND (arg1
, 1), flags
)
2563 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2564 TREE_OPERAND (arg1
, 0), flags
);
2567 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2568 /* This is needed for conversions and for COMPONENT_REF.
2569 Might as well play it safe and always test this. */
2570 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2571 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2572 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2575 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2576 We don't care about side effects in that case because the SAVE_EXPR
2577 takes care of that for us. In all other cases, two expressions are
2578 equal if they have no side effects. If we have two identical
2579 expressions with side effects that should be treated the same due
2580 to the only side effects being identical SAVE_EXPR's, that will
2581 be detected in the recursive calls below. */
2582 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2583 && (TREE_CODE (arg0
) == SAVE_EXPR
2584 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2587 /* Next handle constant cases, those for which we can return 1 even
2588 if ONLY_CONST is set. */
2589 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2590 switch (TREE_CODE (arg0
))
2593 return tree_int_cst_equal (arg0
, arg1
);
2596 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2597 TREE_REAL_CST (arg1
)))
2601 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2603 /* If we do not distinguish between signed and unsigned zero,
2604 consider them equal. */
2605 if (real_zerop (arg0
) && real_zerop (arg1
))
2614 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2615 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2618 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2621 v1
= TREE_CHAIN (v1
);
2622 v2
= TREE_CHAIN (v2
);
2629 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2631 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2635 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2636 && ! memcmp (TREE_STRING_POINTER (arg0
),
2637 TREE_STRING_POINTER (arg1
),
2638 TREE_STRING_LENGTH (arg0
)));
2641 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2647 if (flags
& OEP_ONLY_CONST
)
2650 /* Define macros to test an operand from arg0 and arg1 for equality and a
2651 variant that allows null and views null as being different from any
2652 non-null value. In the latter case, if either is null, the both
2653 must be; otherwise, do the normal comparison. */
2654 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2655 TREE_OPERAND (arg1, N), flags)
2657 #define OP_SAME_WITH_NULL(N) \
2658 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2659 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2661 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2664 /* Two conversions are equal only if signedness and modes match. */
2665 switch (TREE_CODE (arg0
))
2669 case FIX_TRUNC_EXPR
:
2670 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2671 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2681 case tcc_comparison
:
2683 if (OP_SAME (0) && OP_SAME (1))
2686 /* For commutative ops, allow the other order. */
2687 return (commutative_tree_code (TREE_CODE (arg0
))
2688 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2689 TREE_OPERAND (arg1
, 1), flags
)
2690 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2691 TREE_OPERAND (arg1
, 0), flags
));
2694 /* If either of the pointer (or reference) expressions we are
2695 dereferencing contain a side effect, these cannot be equal. */
2696 if (TREE_SIDE_EFFECTS (arg0
)
2697 || TREE_SIDE_EFFECTS (arg1
))
2700 switch (TREE_CODE (arg0
))
2703 case ALIGN_INDIRECT_REF
:
2704 case MISALIGNED_INDIRECT_REF
:
2710 case ARRAY_RANGE_REF
:
2711 /* Operands 2 and 3 may be null. */
2714 && OP_SAME_WITH_NULL (2)
2715 && OP_SAME_WITH_NULL (3));
2718 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2719 may be NULL when we're called to compare MEM_EXPRs. */
2720 return OP_SAME_WITH_NULL (0)
2722 && OP_SAME_WITH_NULL (2);
2725 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2731 case tcc_expression
:
2732 switch (TREE_CODE (arg0
))
2735 case TRUTH_NOT_EXPR
:
2738 case TRUTH_ANDIF_EXPR
:
2739 case TRUTH_ORIF_EXPR
:
2740 return OP_SAME (0) && OP_SAME (1);
2742 case TRUTH_AND_EXPR
:
2744 case TRUTH_XOR_EXPR
:
2745 if (OP_SAME (0) && OP_SAME (1))
2748 /* Otherwise take into account this is a commutative operation. */
2749 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2750 TREE_OPERAND (arg1
, 1), flags
)
2751 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2752 TREE_OPERAND (arg1
, 0), flags
));
2755 /* If the CALL_EXPRs call different functions, then they
2756 clearly can not be equal. */
2761 unsigned int cef
= call_expr_flags (arg0
);
2762 if (flags
& OEP_PURE_SAME
)
2763 cef
&= ECF_CONST
| ECF_PURE
;
2770 /* Now see if all the arguments are the same. operand_equal_p
2771 does not handle TREE_LIST, so we walk the operands here
2772 feeding them to operand_equal_p. */
2773 arg0
= TREE_OPERAND (arg0
, 1);
2774 arg1
= TREE_OPERAND (arg1
, 1);
2775 while (arg0
&& arg1
)
2777 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2781 arg0
= TREE_CHAIN (arg0
);
2782 arg1
= TREE_CHAIN (arg1
);
2785 /* If we get here and both argument lists are exhausted
2786 then the CALL_EXPRs are equal. */
2787 return ! (arg0
|| arg1
);
2793 case tcc_declaration
:
2794 /* Consider __builtin_sqrt equal to sqrt. */
2795 return (TREE_CODE (arg0
) == FUNCTION_DECL
2796 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2797 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2798 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2805 #undef OP_SAME_WITH_NULL
2808 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2809 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2811 When in doubt, return 0. */
2814 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2816 int unsignedp1
, unsignedpo
;
2817 tree primarg0
, primarg1
, primother
;
2818 unsigned int correct_width
;
2820 if (operand_equal_p (arg0
, arg1
, 0))
2823 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2824 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2827 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2828 and see if the inner values are the same. This removes any
2829 signedness comparison, which doesn't matter here. */
2830 primarg0
= arg0
, primarg1
= arg1
;
2831 STRIP_NOPS (primarg0
);
2832 STRIP_NOPS (primarg1
);
2833 if (operand_equal_p (primarg0
, primarg1
, 0))
2836 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2837 actual comparison operand, ARG0.
2839 First throw away any conversions to wider types
2840 already present in the operands. */
2842 primarg1
= get_narrower (arg1
, &unsignedp1
);
2843 primother
= get_narrower (other
, &unsignedpo
);
2845 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2846 if (unsignedp1
== unsignedpo
2847 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2848 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2850 tree type
= TREE_TYPE (arg0
);
2852 /* Make sure shorter operand is extended the right way
2853 to match the longer operand. */
2854 primarg1
= fold_convert (lang_hooks
.types
.signed_or_unsigned_type
2855 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2857 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2864 /* See if ARG is an expression that is either a comparison or is performing
2865 arithmetic on comparisons. The comparisons must only be comparing
2866 two different values, which will be stored in *CVAL1 and *CVAL2; if
2867 they are nonzero it means that some operands have already been found.
2868 No variables may be used anywhere else in the expression except in the
2869 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2870 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2872 If this is true, return 1. Otherwise, return zero. */
2875 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2877 enum tree_code code
= TREE_CODE (arg
);
2878 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2880 /* We can handle some of the tcc_expression cases here. */
2881 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2883 else if (class == tcc_expression
2884 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2885 || code
== COMPOUND_EXPR
))
2888 else if (class == tcc_expression
&& code
== SAVE_EXPR
2889 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2891 /* If we've already found a CVAL1 or CVAL2, this expression is
2892 two complex to handle. */
2893 if (*cval1
|| *cval2
)
2903 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2906 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2907 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2908 cval1
, cval2
, save_p
));
2913 case tcc_expression
:
2914 if (code
== COND_EXPR
)
2915 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2916 cval1
, cval2
, save_p
)
2917 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2918 cval1
, cval2
, save_p
)
2919 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2920 cval1
, cval2
, save_p
));
2923 case tcc_comparison
:
2924 /* First see if we can handle the first operand, then the second. For
2925 the second operand, we know *CVAL1 can't be zero. It must be that
2926 one side of the comparison is each of the values; test for the
2927 case where this isn't true by failing if the two operands
2930 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2931 TREE_OPERAND (arg
, 1), 0))
2935 *cval1
= TREE_OPERAND (arg
, 0);
2936 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2938 else if (*cval2
== 0)
2939 *cval2
= TREE_OPERAND (arg
, 0);
2940 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2945 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2947 else if (*cval2
== 0)
2948 *cval2
= TREE_OPERAND (arg
, 1);
2949 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2961 /* ARG is a tree that is known to contain just arithmetic operations and
2962 comparisons. Evaluate the operations in the tree substituting NEW0 for
2963 any occurrence of OLD0 as an operand of a comparison and likewise for
2967 eval_subst (tree arg
, tree old0
, tree new0
, tree old1
, tree new1
)
2969 tree type
= TREE_TYPE (arg
);
2970 enum tree_code code
= TREE_CODE (arg
);
2971 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2973 /* We can handle some of the tcc_expression cases here. */
2974 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2976 else if (class == tcc_expression
2977 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2983 return fold_build1 (code
, type
,
2984 eval_subst (TREE_OPERAND (arg
, 0),
2985 old0
, new0
, old1
, new1
));
2988 return fold_build2 (code
, type
,
2989 eval_subst (TREE_OPERAND (arg
, 0),
2990 old0
, new0
, old1
, new1
),
2991 eval_subst (TREE_OPERAND (arg
, 1),
2992 old0
, new0
, old1
, new1
));
2994 case tcc_expression
:
2998 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
3001 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
3004 return fold_build3 (code
, type
,
3005 eval_subst (TREE_OPERAND (arg
, 0),
3006 old0
, new0
, old1
, new1
),
3007 eval_subst (TREE_OPERAND (arg
, 1),
3008 old0
, new0
, old1
, new1
),
3009 eval_subst (TREE_OPERAND (arg
, 2),
3010 old0
, new0
, old1
, new1
));
3014 /* Fall through - ??? */
3016 case tcc_comparison
:
3018 tree arg0
= TREE_OPERAND (arg
, 0);
3019 tree arg1
= TREE_OPERAND (arg
, 1);
3021 /* We need to check both for exact equality and tree equality. The
3022 former will be true if the operand has a side-effect. In that
3023 case, we know the operand occurred exactly once. */
3025 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
3027 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
3030 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
3032 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
3035 return fold_build2 (code
, type
, arg0
, arg1
);
3043 /* Return a tree for the case when the result of an expression is RESULT
3044 converted to TYPE and OMITTED was previously an operand of the expression
3045 but is now not needed (e.g., we folded OMITTED * 0).
3047 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3048 the conversion of RESULT to TYPE. */
3051 omit_one_operand (tree type
, tree result
, tree omitted
)
3053 tree t
= fold_convert (type
, result
);
3055 if (TREE_SIDE_EFFECTS (omitted
))
3056 return build2 (COMPOUND_EXPR
, type
, fold_ignored_result (omitted
), t
);
3058 return non_lvalue (t
);
3061 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3064 pedantic_omit_one_operand (tree type
, tree result
, tree omitted
)
3066 tree t
= fold_convert (type
, result
);
3068 if (TREE_SIDE_EFFECTS (omitted
))
3069 return build2 (COMPOUND_EXPR
, type
, fold_ignored_result (omitted
), t
);
3071 return pedantic_non_lvalue (t
);
3074 /* Return a tree for the case when the result of an expression is RESULT
3075 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3076 of the expression but are now not needed.
3078 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3079 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3080 evaluated before OMITTED2. Otherwise, if neither has side effects,
3081 just do the conversion of RESULT to TYPE. */
3084 omit_two_operands (tree type
, tree result
, tree omitted1
, tree omitted2
)
3086 tree t
= fold_convert (type
, result
);
3088 if (TREE_SIDE_EFFECTS (omitted2
))
3089 t
= build2 (COMPOUND_EXPR
, type
, omitted2
, t
);
3090 if (TREE_SIDE_EFFECTS (omitted1
))
3091 t
= build2 (COMPOUND_EXPR
, type
, omitted1
, t
);
3093 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue (t
) : t
;
3097 /* Return a simplified tree node for the truth-negation of ARG. This
3098 never alters ARG itself. We assume that ARG is an operation that
3099 returns a truth value (0 or 1).
3101 FIXME: one would think we would fold the result, but it causes
3102 problems with the dominator optimizer. */
3105 fold_truth_not_expr (tree arg
)
3107 tree type
= TREE_TYPE (arg
);
3108 enum tree_code code
= TREE_CODE (arg
);
3110 /* If this is a comparison, we can simply invert it, except for
3111 floating-point non-equality comparisons, in which case we just
3112 enclose a TRUTH_NOT_EXPR around what we have. */
3114 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3116 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3117 if (FLOAT_TYPE_P (op_type
)
3118 && flag_trapping_math
3119 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3120 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3124 code
= invert_tree_comparison (code
,
3125 HONOR_NANS (TYPE_MODE (op_type
)));
3126 if (code
== ERROR_MARK
)
3129 return build2 (code
, type
,
3130 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
3137 return constant_boolean_node (integer_zerop (arg
), type
);
3139 case TRUTH_AND_EXPR
:
3140 return build2 (TRUTH_OR_EXPR
, type
,
3141 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3142 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3145 return build2 (TRUTH_AND_EXPR
, type
,
3146 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3147 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3149 case TRUTH_XOR_EXPR
:
3150 /* Here we can invert either operand. We invert the first operand
3151 unless the second operand is a TRUTH_NOT_EXPR in which case our
3152 result is the XOR of the first operand with the inside of the
3153 negation of the second operand. */
3155 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3156 return build2 (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3157 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3159 return build2 (TRUTH_XOR_EXPR
, type
,
3160 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3161 TREE_OPERAND (arg
, 1));
3163 case TRUTH_ANDIF_EXPR
:
3164 return build2 (TRUTH_ORIF_EXPR
, type
,
3165 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3166 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3168 case TRUTH_ORIF_EXPR
:
3169 return build2 (TRUTH_ANDIF_EXPR
, type
,
3170 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3171 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3173 case TRUTH_NOT_EXPR
:
3174 return TREE_OPERAND (arg
, 0);
3178 tree arg1
= TREE_OPERAND (arg
, 1);
3179 tree arg2
= TREE_OPERAND (arg
, 2);
3180 /* A COND_EXPR may have a throw as one operand, which
3181 then has void type. Just leave void operands
3183 return build3 (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3184 VOID_TYPE_P (TREE_TYPE (arg1
))
3185 ? arg1
: invert_truthvalue (arg1
),
3186 VOID_TYPE_P (TREE_TYPE (arg2
))
3187 ? arg2
: invert_truthvalue (arg2
));
3191 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3192 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3194 case NON_LVALUE_EXPR
:
3195 return invert_truthvalue (TREE_OPERAND (arg
, 0));
3198 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3199 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3203 return build1 (TREE_CODE (arg
), type
,
3204 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3207 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3209 return build2 (EQ_EXPR
, type
, arg
,
3210 build_int_cst (type
, 0));
3213 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3215 case CLEANUP_POINT_EXPR
:
3216 return build1 (CLEANUP_POINT_EXPR
, type
,
3217 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3226 /* Return a simplified tree node for the truth-negation of ARG. This
3227 never alters ARG itself. We assume that ARG is an operation that
3228 returns a truth value (0 or 1).
3230 FIXME: one would think we would fold the result, but it causes
3231 problems with the dominator optimizer. */
3234 invert_truthvalue (tree arg
)
3238 if (TREE_CODE (arg
) == ERROR_MARK
)
3241 tem
= fold_truth_not_expr (arg
);
3243 tem
= build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3248 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3249 operands are another bit-wise operation with a common input. If so,
3250 distribute the bit operations to save an operation and possibly two if
3251 constants are involved. For example, convert
3252 (A | B) & (A | C) into A | (B & C)
3253 Further simplification will occur if B and C are constants.
3255 If this optimization cannot be done, 0 will be returned. */
3258 distribute_bit_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3263 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3264 || TREE_CODE (arg0
) == code
3265 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3266 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3269 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3271 common
= TREE_OPERAND (arg0
, 0);
3272 left
= TREE_OPERAND (arg0
, 1);
3273 right
= TREE_OPERAND (arg1
, 1);
3275 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3277 common
= TREE_OPERAND (arg0
, 0);
3278 left
= TREE_OPERAND (arg0
, 1);
3279 right
= TREE_OPERAND (arg1
, 0);
3281 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3283 common
= TREE_OPERAND (arg0
, 1);
3284 left
= TREE_OPERAND (arg0
, 0);
3285 right
= TREE_OPERAND (arg1
, 1);
3287 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3289 common
= TREE_OPERAND (arg0
, 1);
3290 left
= TREE_OPERAND (arg0
, 0);
3291 right
= TREE_OPERAND (arg1
, 0);
3296 return fold_build2 (TREE_CODE (arg0
), type
, common
,
3297 fold_build2 (code
, type
, left
, right
));
3300 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3301 with code CODE. This optimization is unsafe. */
3303 distribute_real_division (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3305 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3306 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3308 /* (A / C) +- (B / C) -> (A +- B) / C. */
3310 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3311 TREE_OPERAND (arg1
, 1), 0))
3312 return fold_build2 (mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3313 fold_build2 (code
, type
,
3314 TREE_OPERAND (arg0
, 0),
3315 TREE_OPERAND (arg1
, 0)),
3316 TREE_OPERAND (arg0
, 1));
3318 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3319 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3320 TREE_OPERAND (arg1
, 0), 0)
3321 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3322 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3324 REAL_VALUE_TYPE r0
, r1
;
3325 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3326 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3328 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3330 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3331 real_arithmetic (&r0
, code
, &r0
, &r1
);
3332 return fold_build2 (MULT_EXPR
, type
,
3333 TREE_OPERAND (arg0
, 0),
3334 build_real (type
, r0
));
3340 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3341 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3344 make_bit_field_ref (tree inner
, tree type
, int bitsize
, int bitpos
,
3351 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3352 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3353 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3354 && host_integerp (size
, 0)
3355 && tree_low_cst (size
, 0) == bitsize
)
3356 return fold_convert (type
, inner
);
3359 result
= build3 (BIT_FIELD_REF
, type
, inner
,
3360 size_int (bitsize
), bitsize_int (bitpos
));
3362 BIT_FIELD_REF_UNSIGNED (result
) = unsignedp
;
3367 /* Optimize a bit-field compare.
3369 There are two cases: First is a compare against a constant and the
3370 second is a comparison of two items where the fields are at the same
3371 bit position relative to the start of a chunk (byte, halfword, word)
3372 large enough to contain it. In these cases we can avoid the shift
3373 implicit in bitfield extractions.
3375 For constants, we emit a compare of the shifted constant with the
3376 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3377 compared. For two fields at the same position, we do the ANDs with the
3378 similar mask and compare the result of the ANDs.
3380 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3381 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3382 are the left and right operands of the comparison, respectively.
3384 If the optimization described above can be done, we return the resulting
3385 tree. Otherwise we return zero. */
3388 optimize_bit_field_compare (enum tree_code code
, tree compare_type
,
3391 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3392 tree type
= TREE_TYPE (lhs
);
3393 tree signed_type
, unsigned_type
;
3394 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3395 enum machine_mode lmode
, rmode
, nmode
;
3396 int lunsignedp
, runsignedp
;
3397 int lvolatilep
= 0, rvolatilep
= 0;
3398 tree linner
, rinner
= NULL_TREE
;
3402 /* Get all the information about the extractions being done. If the bit size
3403 if the same as the size of the underlying object, we aren't doing an
3404 extraction at all and so can do nothing. We also don't want to
3405 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3406 then will no longer be able to replace it. */
3407 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3408 &lunsignedp
, &lvolatilep
, false);
3409 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3410 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3415 /* If this is not a constant, we can only do something if bit positions,
3416 sizes, and signedness are the same. */
3417 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3418 &runsignedp
, &rvolatilep
, false);
3420 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3421 || lunsignedp
!= runsignedp
|| offset
!= 0
3422 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3426 /* See if we can find a mode to refer to this field. We should be able to,
3427 but fail if we can't. */
3428 nmode
= get_best_mode (lbitsize
, lbitpos
,
3429 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3430 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3431 TYPE_ALIGN (TREE_TYPE (rinner
))),
3432 word_mode
, lvolatilep
|| rvolatilep
);
3433 if (nmode
== VOIDmode
)
3436 /* Set signed and unsigned types of the precision of this mode for the
3438 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3439 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3441 /* Compute the bit position and size for the new reference and our offset
3442 within it. If the new reference is the same size as the original, we
3443 won't optimize anything, so return zero. */
3444 nbitsize
= GET_MODE_BITSIZE (nmode
);
3445 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3447 if (nbitsize
== lbitsize
)
3450 if (BYTES_BIG_ENDIAN
)
3451 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3453 /* Make the mask to be used against the extracted field. */
3454 mask
= build_int_cst_type (unsigned_type
, -1);
3455 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
), 0);
3456 mask
= const_binop (RSHIFT_EXPR
, mask
,
3457 size_int (nbitsize
- lbitsize
- lbitpos
), 0);
3460 /* If not comparing with constant, just rework the comparison
3462 return fold_build2 (code
, compare_type
,
3463 fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3464 make_bit_field_ref (linner
,
3469 fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3470 make_bit_field_ref (rinner
,
3476 /* Otherwise, we are handling the constant case. See if the constant is too
3477 big for the field. Warn and return a tree of for 0 (false) if so. We do
3478 this not only for its own sake, but to avoid having to test for this
3479 error case below. If we didn't, we might generate wrong code.
3481 For unsigned fields, the constant shifted right by the field length should
3482 be all zero. For signed fields, the high-order bits should agree with
3487 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3488 fold_convert (unsigned_type
, rhs
),
3489 size_int (lbitsize
), 0)))
3491 warning (0, "comparison is always %d due to width of bit-field",
3493 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3498 tree tem
= const_binop (RSHIFT_EXPR
, fold_convert (signed_type
, rhs
),
3499 size_int (lbitsize
- 1), 0);
3500 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3502 warning (0, "comparison is always %d due to width of bit-field",
3504 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3508 /* Single-bit compares should always be against zero. */
3509 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3511 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3512 rhs
= build_int_cst (type
, 0);
3515 /* Make a new bitfield reference, shift the constant over the
3516 appropriate number of bits and mask it with the computed mask
3517 (in case this was a signed field). If we changed it, make a new one. */
3518 lhs
= make_bit_field_ref (linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3521 TREE_SIDE_EFFECTS (lhs
) = 1;
3522 TREE_THIS_VOLATILE (lhs
) = 1;
3525 rhs
= const_binop (BIT_AND_EXPR
,
3526 const_binop (LSHIFT_EXPR
,
3527 fold_convert (unsigned_type
, rhs
),
3528 size_int (lbitpos
), 0),
3531 return build2 (code
, compare_type
,
3532 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
3536 /* Subroutine for fold_truthop: decode a field reference.
3538 If EXP is a comparison reference, we return the innermost reference.
3540 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3541 set to the starting bit number.
3543 If the innermost field can be completely contained in a mode-sized
3544 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3546 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3547 otherwise it is not changed.
3549 *PUNSIGNEDP is set to the signedness of the field.
3551 *PMASK is set to the mask used. This is either contained in a
3552 BIT_AND_EXPR or derived from the width of the field.
3554 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3556 Return 0 if this is not a component reference or is one that we can't
3557 do anything with. */
3560 decode_field_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
3561 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3562 int *punsignedp
, int *pvolatilep
,
3563 tree
*pmask
, tree
*pand_mask
)
3565 tree outer_type
= 0;
3567 tree mask
, inner
, offset
;
3569 unsigned int precision
;
3571 /* All the optimizations using this function assume integer fields.
3572 There are problems with FP fields since the type_for_size call
3573 below can fail for, e.g., XFmode. */
3574 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3577 /* We are interested in the bare arrangement of bits, so strip everything
3578 that doesn't affect the machine mode. However, record the type of the
3579 outermost expression if it may matter below. */
3580 if (TREE_CODE (exp
) == NOP_EXPR
3581 || TREE_CODE (exp
) == CONVERT_EXPR
3582 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3583 outer_type
= TREE_TYPE (exp
);
3586 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3588 and_mask
= TREE_OPERAND (exp
, 1);
3589 exp
= TREE_OPERAND (exp
, 0);
3590 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3591 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3595 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3596 punsignedp
, pvolatilep
, false);
3597 if ((inner
== exp
&& and_mask
== 0)
3598 || *pbitsize
< 0 || offset
!= 0
3599 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3602 /* If the number of bits in the reference is the same as the bitsize of
3603 the outer type, then the outer type gives the signedness. Otherwise
3604 (in case of a small bitfield) the signedness is unchanged. */
3605 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3606 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3608 /* Compute the mask to access the bitfield. */
3609 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3610 precision
= TYPE_PRECISION (unsigned_type
);
3612 mask
= build_int_cst_type (unsigned_type
, -1);
3614 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3615 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3617 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3619 mask
= fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3620 fold_convert (unsigned_type
, and_mask
), mask
);
3623 *pand_mask
= and_mask
;
3627 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3631 all_ones_mask_p (tree mask
, int size
)
3633 tree type
= TREE_TYPE (mask
);
3634 unsigned int precision
= TYPE_PRECISION (type
);
3637 tmask
= build_int_cst_type (lang_hooks
.types
.signed_type (type
), -1);
3640 tree_int_cst_equal (mask
,
3641 const_binop (RSHIFT_EXPR
,
3642 const_binop (LSHIFT_EXPR
, tmask
,
3643 size_int (precision
- size
),
3645 size_int (precision
- size
), 0));
3648 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3649 represents the sign bit of EXP's type. If EXP represents a sign
3650 or zero extension, also test VAL against the unextended type.
3651 The return value is the (sub)expression whose sign bit is VAL,
3652 or NULL_TREE otherwise. */
3655 sign_bit_p (tree exp
, tree val
)
3657 unsigned HOST_WIDE_INT mask_lo
, lo
;
3658 HOST_WIDE_INT mask_hi
, hi
;
3662 /* Tree EXP must have an integral type. */
3663 t
= TREE_TYPE (exp
);
3664 if (! INTEGRAL_TYPE_P (t
))
3667 /* Tree VAL must be an integer constant. */
3668 if (TREE_CODE (val
) != INTEGER_CST
3669 || TREE_OVERFLOW (val
))
3672 width
= TYPE_PRECISION (t
);
3673 if (width
> HOST_BITS_PER_WIDE_INT
)
3675 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3678 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3679 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3685 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3688 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3689 >> (HOST_BITS_PER_WIDE_INT
- width
));
3692 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3693 treat VAL as if it were unsigned. */
3694 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3695 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3698 /* Handle extension from a narrower type. */
3699 if (TREE_CODE (exp
) == NOP_EXPR
3700 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3701 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3706 /* Subroutine for fold_truthop: determine if an operand is simple enough
3707 to be evaluated unconditionally. */
3710 simple_operand_p (tree exp
)
3712 /* Strip any conversions that don't change the machine mode. */
3715 return (CONSTANT_CLASS_P (exp
)
3716 || TREE_CODE (exp
) == SSA_NAME
3718 && ! TREE_ADDRESSABLE (exp
)
3719 && ! TREE_THIS_VOLATILE (exp
)
3720 && ! DECL_NONLOCAL (exp
)
3721 /* Don't regard global variables as simple. They may be
3722 allocated in ways unknown to the compiler (shared memory,
3723 #pragma weak, etc). */
3724 && ! TREE_PUBLIC (exp
)
3725 && ! DECL_EXTERNAL (exp
)
3726 /* Loading a static variable is unduly expensive, but global
3727 registers aren't expensive. */
3728 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3731 /* The following functions are subroutines to fold_range_test and allow it to
3732 try to change a logical combination of comparisons into a range test.
3735 X == 2 || X == 3 || X == 4 || X == 5
3739 (unsigned) (X - 2) <= 3
3741 We describe each set of comparisons as being either inside or outside
3742 a range, using a variable named like IN_P, and then describe the
3743 range with a lower and upper bound. If one of the bounds is omitted,
3744 it represents either the highest or lowest value of the type.
3746 In the comments below, we represent a range by two numbers in brackets
3747 preceded by a "+" to designate being inside that range, or a "-" to
3748 designate being outside that range, so the condition can be inverted by
3749 flipping the prefix. An omitted bound is represented by a "-". For
3750 example, "- [-, 10]" means being outside the range starting at the lowest
3751 possible value and ending at 10, in other words, being greater than 10.
3752 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3755 We set up things so that the missing bounds are handled in a consistent
3756 manner so neither a missing bound nor "true" and "false" need to be
3757 handled using a special case. */
3759 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3760 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3761 and UPPER1_P are nonzero if the respective argument is an upper bound
3762 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3763 must be specified for a comparison. ARG1 will be converted to ARG0's
3764 type if both are specified. */
3767 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3768 tree arg1
, int upper1_p
)
3774 /* If neither arg represents infinity, do the normal operation.
3775 Else, if not a comparison, return infinity. Else handle the special
3776 comparison rules. Note that most of the cases below won't occur, but
3777 are handled for consistency. */
3779 if (arg0
!= 0 && arg1
!= 0)
3781 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3782 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3784 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3787 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3790 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3791 for neither. In real maths, we cannot assume open ended ranges are
3792 the same. But, this is computer arithmetic, where numbers are finite.
3793 We can therefore make the transformation of any unbounded range with
3794 the value Z, Z being greater than any representable number. This permits
3795 us to treat unbounded ranges as equal. */
3796 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3797 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3801 result
= sgn0
== sgn1
;
3804 result
= sgn0
!= sgn1
;
3807 result
= sgn0
< sgn1
;
3810 result
= sgn0
<= sgn1
;
3813 result
= sgn0
> sgn1
;
3816 result
= sgn0
>= sgn1
;
3822 return constant_boolean_node (result
, type
);
3825 /* Given EXP, a logical expression, set the range it is testing into
3826 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3827 actually being tested. *PLOW and *PHIGH will be made of the same type
3828 as the returned expression. If EXP is not a comparison, we will most
3829 likely not be returning a useful value and range. */
3832 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
)
3834 enum tree_code code
;
3835 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
3836 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
3838 tree low
, high
, n_low
, n_high
;
3840 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3841 and see if we can refine the range. Some of the cases below may not
3842 happen, but it doesn't seem worth worrying about this. We "continue"
3843 the outer loop when we've changed something; otherwise we "break"
3844 the switch, which will "break" the while. */
3847 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
3851 code
= TREE_CODE (exp
);
3852 exp_type
= TREE_TYPE (exp
);
3854 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
3856 if (TREE_CODE_LENGTH (code
) > 0)
3857 arg0
= TREE_OPERAND (exp
, 0);
3858 if (TREE_CODE_CLASS (code
) == tcc_comparison
3859 || TREE_CODE_CLASS (code
) == tcc_unary
3860 || TREE_CODE_CLASS (code
) == tcc_binary
)
3861 arg0_type
= TREE_TYPE (arg0
);
3862 if (TREE_CODE_CLASS (code
) == tcc_binary
3863 || TREE_CODE_CLASS (code
) == tcc_comparison
3864 || (TREE_CODE_CLASS (code
) == tcc_expression
3865 && TREE_CODE_LENGTH (code
) > 1))
3866 arg1
= TREE_OPERAND (exp
, 1);
3871 case TRUTH_NOT_EXPR
:
3872 in_p
= ! in_p
, exp
= arg0
;
3875 case EQ_EXPR
: case NE_EXPR
:
3876 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3877 /* We can only do something if the range is testing for zero
3878 and if the second operand is an integer constant. Note that
3879 saying something is "in" the range we make is done by
3880 complementing IN_P since it will set in the initial case of
3881 being not equal to zero; "out" is leaving it alone. */
3882 if (low
== 0 || high
== 0
3883 || ! integer_zerop (low
) || ! integer_zerop (high
)
3884 || TREE_CODE (arg1
) != INTEGER_CST
)
3889 case NE_EXPR
: /* - [c, c] */
3892 case EQ_EXPR
: /* + [c, c] */
3893 in_p
= ! in_p
, low
= high
= arg1
;
3895 case GT_EXPR
: /* - [-, c] */
3896 low
= 0, high
= arg1
;
3898 case GE_EXPR
: /* + [c, -] */
3899 in_p
= ! in_p
, low
= arg1
, high
= 0;
3901 case LT_EXPR
: /* - [c, -] */
3902 low
= arg1
, high
= 0;
3904 case LE_EXPR
: /* + [-, c] */
3905 in_p
= ! in_p
, low
= 0, high
= arg1
;
3911 /* If this is an unsigned comparison, we also know that EXP is
3912 greater than or equal to zero. We base the range tests we make
3913 on that fact, so we record it here so we can parse existing
3914 range tests. We test arg0_type since often the return type
3915 of, e.g. EQ_EXPR, is boolean. */
3916 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3918 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3920 build_int_cst (arg0_type
, 0),
3924 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3926 /* If the high bound is missing, but we have a nonzero low
3927 bound, reverse the range so it goes from zero to the low bound
3929 if (high
== 0 && low
&& ! integer_zerop (low
))
3932 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3933 integer_one_node
, 0);
3934 low
= build_int_cst (arg0_type
, 0);
3942 /* (-x) IN [a,b] -> x in [-b, -a] */
3943 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3944 build_int_cst (exp_type
, 0),
3946 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3947 build_int_cst (exp_type
, 0),
3949 low
= n_low
, high
= n_high
;
3955 exp
= build2 (MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3956 build_int_cst (exp_type
, 1));
3959 case PLUS_EXPR
: case MINUS_EXPR
:
3960 if (TREE_CODE (arg1
) != INTEGER_CST
)
3963 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3964 move a constant to the other side. */
3965 if (flag_wrapv
&& !TYPE_UNSIGNED (arg0_type
))
3968 /* If EXP is signed, any overflow in the computation is undefined,
3969 so we don't worry about it so long as our computations on
3970 the bounds don't overflow. For unsigned, overflow is defined
3971 and this is exactly the right thing. */
3972 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3973 arg0_type
, low
, 0, arg1
, 0);
3974 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3975 arg0_type
, high
, 1, arg1
, 0);
3976 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3977 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3980 /* Check for an unsigned range which has wrapped around the maximum
3981 value thus making n_high < n_low, and normalize it. */
3982 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3984 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3985 integer_one_node
, 0);
3986 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3987 integer_one_node
, 0);
3989 /* If the range is of the form +/- [ x+1, x ], we won't
3990 be able to normalize it. But then, it represents the
3991 whole range or the empty set, so make it
3993 if (tree_int_cst_equal (n_low
, low
)
3994 && tree_int_cst_equal (n_high
, high
))
4000 low
= n_low
, high
= n_high
;
4005 case NOP_EXPR
: case NON_LVALUE_EXPR
: case CONVERT_EXPR
:
4006 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
4009 if (! INTEGRAL_TYPE_P (arg0_type
)
4010 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
4011 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
4014 n_low
= low
, n_high
= high
;
4017 n_low
= fold_convert (arg0_type
, n_low
);
4020 n_high
= fold_convert (arg0_type
, n_high
);
4023 /* If we're converting arg0 from an unsigned type, to exp,
4024 a signed type, we will be doing the comparison as unsigned.
4025 The tests above have already verified that LOW and HIGH
4028 So we have to ensure that we will handle large unsigned
4029 values the same way that the current signed bounds treat
4032 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
4035 tree equiv_type
= lang_hooks
.types
.type_for_mode
4036 (TYPE_MODE (arg0_type
), 1);
4038 /* A range without an upper bound is, naturally, unbounded.
4039 Since convert would have cropped a very large value, use
4040 the max value for the destination type. */
4042 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
4043 : TYPE_MAX_VALUE (arg0_type
);
4045 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
4046 high_positive
= fold_build2 (RSHIFT_EXPR
, arg0_type
,
4047 fold_convert (arg0_type
,
4049 build_int_cst (arg0_type
, 1));
4051 /* If the low bound is specified, "and" the range with the
4052 range for which the original unsigned value will be
4056 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4057 1, n_low
, n_high
, 1,
4058 fold_convert (arg0_type
,
4063 in_p
= (n_in_p
== in_p
);
4067 /* Otherwise, "or" the range with the range of the input
4068 that will be interpreted as negative. */
4069 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4070 0, n_low
, n_high
, 1,
4071 fold_convert (arg0_type
,
4076 in_p
= (in_p
!= n_in_p
);
4081 low
= n_low
, high
= n_high
;
4091 /* If EXP is a constant, we can evaluate whether this is true or false. */
4092 if (TREE_CODE (exp
) == INTEGER_CST
)
4094 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4096 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4102 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4106 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4107 type, TYPE, return an expression to test if EXP is in (or out of, depending
4108 on IN_P) the range. Return 0 if the test couldn't be created. */
4111 build_range_check (tree type
, tree exp
, int in_p
, tree low
, tree high
)
4113 tree etype
= TREE_TYPE (exp
);
4116 #ifdef HAVE_canonicalize_funcptr_for_compare
4117 /* Disable this optimization for function pointer expressions
4118 on targets that require function pointer canonicalization. */
4119 if (HAVE_canonicalize_funcptr_for_compare
4120 && TREE_CODE (etype
) == POINTER_TYPE
4121 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4127 value
= build_range_check (type
, exp
, 1, low
, high
);
4129 return invert_truthvalue (value
);
4134 if (low
== 0 && high
== 0)
4135 return build_int_cst (type
, 1);
4138 return fold_build2 (LE_EXPR
, type
, exp
,
4139 fold_convert (etype
, high
));
4142 return fold_build2 (GE_EXPR
, type
, exp
,
4143 fold_convert (etype
, low
));
4145 if (operand_equal_p (low
, high
, 0))
4146 return fold_build2 (EQ_EXPR
, type
, exp
,
4147 fold_convert (etype
, low
));
4149 if (integer_zerop (low
))
4151 if (! TYPE_UNSIGNED (etype
))
4153 etype
= lang_hooks
.types
.unsigned_type (etype
);
4154 high
= fold_convert (etype
, high
);
4155 exp
= fold_convert (etype
, exp
);
4157 return build_range_check (type
, exp
, 1, 0, high
);
4160 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4161 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4163 unsigned HOST_WIDE_INT lo
;
4167 prec
= TYPE_PRECISION (etype
);
4168 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4171 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4175 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4176 lo
= (unsigned HOST_WIDE_INT
) -1;
4179 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4181 if (TYPE_UNSIGNED (etype
))
4183 etype
= lang_hooks
.types
.signed_type (etype
);
4184 exp
= fold_convert (etype
, exp
);
4186 return fold_build2 (GT_EXPR
, type
, exp
,
4187 build_int_cst (etype
, 0));
4191 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4192 This requires wrap-around arithmetics for the type of the expression. */
4193 switch (TREE_CODE (etype
))
4196 /* There is no requirement that LOW be within the range of ETYPE
4197 if the latter is a subtype. It must, however, be within the base
4198 type of ETYPE. So be sure we do the subtraction in that type. */
4199 if (TREE_TYPE (etype
))
4200 etype
= TREE_TYPE (etype
);
4205 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4206 TYPE_UNSIGNED (etype
));
4213 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4214 if (TREE_CODE (etype
) == INTEGER_TYPE
4215 && !TYPE_UNSIGNED (etype
) && !flag_wrapv
)
4217 tree utype
, minv
, maxv
;
4219 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4220 for the type in question, as we rely on this here. */
4221 utype
= lang_hooks
.types
.unsigned_type (etype
);
4222 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4223 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4224 integer_one_node
, 1);
4225 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4227 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4234 high
= fold_convert (etype
, high
);
4235 low
= fold_convert (etype
, low
);
4236 exp
= fold_convert (etype
, exp
);
4238 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4240 if (value
!= 0 && !TREE_OVERFLOW (value
))
4241 return build_range_check (type
,
4242 fold_build2 (MINUS_EXPR
, etype
, exp
, low
),
4243 1, build_int_cst (etype
, 0), value
);
4248 /* Return the predecessor of VAL in its type, handling the infinite case. */
4251 range_predecessor (tree val
)
4253 tree type
= TREE_TYPE (val
);
4255 if (INTEGRAL_TYPE_P (type
)
4256 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4259 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4262 /* Return the successor of VAL in its type, handling the infinite case. */
4265 range_successor (tree val
)
4267 tree type
= TREE_TYPE (val
);
4269 if (INTEGRAL_TYPE_P (type
)
4270 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4273 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4276 /* Given two ranges, see if we can merge them into one. Return 1 if we
4277 can, 0 if we can't. Set the output range into the specified parameters. */
4280 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4281 tree high0
, int in1_p
, tree low1
, tree high1
)
4289 int lowequal
= ((low0
== 0 && low1
== 0)
4290 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4291 low0
, 0, low1
, 0)));
4292 int highequal
= ((high0
== 0 && high1
== 0)
4293 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4294 high0
, 1, high1
, 1)));
4296 /* Make range 0 be the range that starts first, or ends last if they
4297 start at the same value. Swap them if it isn't. */
4298 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4301 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4302 high1
, 1, high0
, 1))))
4304 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4305 tem
= low0
, low0
= low1
, low1
= tem
;
4306 tem
= high0
, high0
= high1
, high1
= tem
;
4309 /* Now flag two cases, whether the ranges are disjoint or whether the
4310 second range is totally subsumed in the first. Note that the tests
4311 below are simplified by the ones above. */
4312 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4313 high0
, 1, low1
, 0));
4314 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4315 high1
, 1, high0
, 1));
4317 /* We now have four cases, depending on whether we are including or
4318 excluding the two ranges. */
4321 /* If they don't overlap, the result is false. If the second range
4322 is a subset it is the result. Otherwise, the range is from the start
4323 of the second to the end of the first. */
4325 in_p
= 0, low
= high
= 0;
4327 in_p
= 1, low
= low1
, high
= high1
;
4329 in_p
= 1, low
= low1
, high
= high0
;
4332 else if (in0_p
&& ! in1_p
)
4334 /* If they don't overlap, the result is the first range. If they are
4335 equal, the result is false. If the second range is a subset of the
4336 first, and the ranges begin at the same place, we go from just after
4337 the end of the second range to the end of the first. If the second
4338 range is not a subset of the first, or if it is a subset and both
4339 ranges end at the same place, the range starts at the start of the
4340 first range and ends just before the second range.
4341 Otherwise, we can't describe this as a single range. */
4343 in_p
= 1, low
= low0
, high
= high0
;
4344 else if (lowequal
&& highequal
)
4345 in_p
= 0, low
= high
= 0;
4346 else if (subset
&& lowequal
)
4348 low
= range_successor (high1
);
4352 else if (! subset
|| highequal
)
4355 high
= range_predecessor (low1
);
4362 else if (! in0_p
&& in1_p
)
4364 /* If they don't overlap, the result is the second range. If the second
4365 is a subset of the first, the result is false. Otherwise,
4366 the range starts just after the first range and ends at the
4367 end of the second. */
4369 in_p
= 1, low
= low1
, high
= high1
;
4370 else if (subset
|| highequal
)
4371 in_p
= 0, low
= high
= 0;
4374 low
= range_successor (high0
);
4382 /* The case where we are excluding both ranges. Here the complex case
4383 is if they don't overlap. In that case, the only time we have a
4384 range is if they are adjacent. If the second is a subset of the
4385 first, the result is the first. Otherwise, the range to exclude
4386 starts at the beginning of the first range and ends at the end of the
4390 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4391 range_successor (high0
),
4393 in_p
= 0, low
= low0
, high
= high1
;
4396 /* Canonicalize - [min, x] into - [-, x]. */
4397 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4398 switch (TREE_CODE (TREE_TYPE (low0
)))
4401 if (TYPE_PRECISION (TREE_TYPE (low0
))
4402 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4406 if (tree_int_cst_equal (low0
,
4407 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4411 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4412 && integer_zerop (low0
))
4419 /* Canonicalize - [x, max] into - [x, -]. */
4420 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4421 switch (TREE_CODE (TREE_TYPE (high1
)))
4424 if (TYPE_PRECISION (TREE_TYPE (high1
))
4425 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4429 if (tree_int_cst_equal (high1
,
4430 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4434 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4435 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4437 integer_one_node
, 1)))
4444 /* The ranges might be also adjacent between the maximum and
4445 minimum values of the given type. For
4446 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4447 return + [x + 1, y - 1]. */
4448 if (low0
== 0 && high1
== 0)
4450 low
= range_successor (high0
);
4451 high
= range_predecessor (low1
);
4452 if (low
== 0 || high
== 0)
4462 in_p
= 0, low
= low0
, high
= high0
;
4464 in_p
= 0, low
= low0
, high
= high1
;
4467 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4472 /* Subroutine of fold, looking inside expressions of the form
4473 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4474 of the COND_EXPR. This function is being used also to optimize
4475 A op B ? C : A, by reversing the comparison first.
4477 Return a folded expression whose code is not a COND_EXPR
4478 anymore, or NULL_TREE if no folding opportunity is found. */
4481 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4483 enum tree_code comp_code
= TREE_CODE (arg0
);
4484 tree arg00
= TREE_OPERAND (arg0
, 0);
4485 tree arg01
= TREE_OPERAND (arg0
, 1);
4486 tree arg1_type
= TREE_TYPE (arg1
);
4492 /* If we have A op 0 ? A : -A, consider applying the following
4495 A == 0? A : -A same as -A
4496 A != 0? A : -A same as A
4497 A >= 0? A : -A same as abs (A)
4498 A > 0? A : -A same as abs (A)
4499 A <= 0? A : -A same as -abs (A)
4500 A < 0? A : -A same as -abs (A)
4502 None of these transformations work for modes with signed
4503 zeros. If A is +/-0, the first two transformations will
4504 change the sign of the result (from +0 to -0, or vice
4505 versa). The last four will fix the sign of the result,
4506 even though the original expressions could be positive or
4507 negative, depending on the sign of A.
4509 Note that all these transformations are correct if A is
4510 NaN, since the two alternatives (A and -A) are also NaNs. */
4511 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4512 ? real_zerop (arg01
)
4513 : integer_zerop (arg01
))
4514 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4515 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4516 /* In the case that A is of the form X-Y, '-A' (arg2) may
4517 have already been folded to Y-X, check for that. */
4518 || (TREE_CODE (arg1
) == MINUS_EXPR
4519 && TREE_CODE (arg2
) == MINUS_EXPR
4520 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4521 TREE_OPERAND (arg2
, 1), 0)
4522 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4523 TREE_OPERAND (arg2
, 0), 0))))
4528 tem
= fold_convert (arg1_type
, arg1
);
4529 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4532 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4535 if (flag_trapping_math
)
4540 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4541 arg1
= fold_convert (lang_hooks
.types
.signed_type
4542 (TREE_TYPE (arg1
)), arg1
);
4543 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4544 return pedantic_non_lvalue (fold_convert (type
, tem
));
4547 if (flag_trapping_math
)
4551 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4552 arg1
= fold_convert (lang_hooks
.types
.signed_type
4553 (TREE_TYPE (arg1
)), arg1
);
4554 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4555 return negate_expr (fold_convert (type
, tem
));
4557 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4561 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4562 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4563 both transformations are correct when A is NaN: A != 0
4564 is then true, and A == 0 is false. */
4566 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4568 if (comp_code
== NE_EXPR
)
4569 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4570 else if (comp_code
== EQ_EXPR
)
4571 return build_int_cst (type
, 0);
4574 /* Try some transformations of A op B ? A : B.
4576 A == B? A : B same as B
4577 A != B? A : B same as A
4578 A >= B? A : B same as max (A, B)
4579 A > B? A : B same as max (B, A)
4580 A <= B? A : B same as min (A, B)
4581 A < B? A : B same as min (B, A)
4583 As above, these transformations don't work in the presence
4584 of signed zeros. For example, if A and B are zeros of
4585 opposite sign, the first two transformations will change
4586 the sign of the result. In the last four, the original
4587 expressions give different results for (A=+0, B=-0) and
4588 (A=-0, B=+0), but the transformed expressions do not.
4590 The first two transformations are correct if either A or B
4591 is a NaN. In the first transformation, the condition will
4592 be false, and B will indeed be chosen. In the case of the
4593 second transformation, the condition A != B will be true,
4594 and A will be chosen.
4596 The conversions to max() and min() are not correct if B is
4597 a number and A is not. The conditions in the original
4598 expressions will be false, so all four give B. The min()
4599 and max() versions would give a NaN instead. */
4600 if (operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4601 /* Avoid these transformations if the COND_EXPR may be used
4602 as an lvalue in the C++ front-end. PR c++/19199. */
4604 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4605 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4606 || ! maybe_lvalue_p (arg1
)
4607 || ! maybe_lvalue_p (arg2
)))
4609 tree comp_op0
= arg00
;
4610 tree comp_op1
= arg01
;
4611 tree comp_type
= TREE_TYPE (comp_op0
);
4613 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4614 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4624 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4626 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4631 /* In C++ a ?: expression can be an lvalue, so put the
4632 operand which will be used if they are equal first
4633 so that we can convert this back to the
4634 corresponding COND_EXPR. */
4635 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4637 comp_op0
= fold_convert (comp_type
, comp_op0
);
4638 comp_op1
= fold_convert (comp_type
, comp_op1
);
4639 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4640 ? fold_build2 (MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4641 : fold_build2 (MIN_EXPR
, comp_type
, comp_op1
, comp_op0
);
4642 return pedantic_non_lvalue (fold_convert (type
, tem
));
4649 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4651 comp_op0
= fold_convert (comp_type
, comp_op0
);
4652 comp_op1
= fold_convert (comp_type
, comp_op1
);
4653 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4654 ? fold_build2 (MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4655 : fold_build2 (MAX_EXPR
, comp_type
, comp_op1
, comp_op0
);
4656 return pedantic_non_lvalue (fold_convert (type
, tem
));
4660 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4661 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4665 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4668 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4673 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4674 we might still be able to simplify this. For example,
4675 if C1 is one less or one more than C2, this might have started
4676 out as a MIN or MAX and been transformed by this function.
4677 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4679 if (INTEGRAL_TYPE_P (type
)
4680 && TREE_CODE (arg01
) == INTEGER_CST
4681 && TREE_CODE (arg2
) == INTEGER_CST
)
4685 /* We can replace A with C1 in this case. */
4686 arg1
= fold_convert (type
, arg01
);
4687 return fold_build3 (COND_EXPR
, type
, arg0
, arg1
, arg2
);
4690 /* If C1 is C2 + 1, this is min(A, C2). */
4691 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4693 && operand_equal_p (arg01
,
4694 const_binop (PLUS_EXPR
, arg2
,
4695 build_int_cst (type
, 1), 0),
4697 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4702 /* If C1 is C2 - 1, this is min(A, C2). */
4703 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4705 && operand_equal_p (arg01
,
4706 const_binop (MINUS_EXPR
, arg2
,
4707 build_int_cst (type
, 1), 0),
4709 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4714 /* If C1 is C2 - 1, this is max(A, C2). */
4715 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4717 && operand_equal_p (arg01
,
4718 const_binop (MINUS_EXPR
, arg2
,
4719 build_int_cst (type
, 1), 0),
4721 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4726 /* If C1 is C2 + 1, this is max(A, C2). */
4727 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4729 && operand_equal_p (arg01
,
4730 const_binop (PLUS_EXPR
, arg2
,
4731 build_int_cst (type
, 1), 0),
4733 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4747 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4748 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4751 /* EXP is some logical combination of boolean tests. See if we can
4752 merge it into some range test. Return the new tree if so. */
4755 fold_range_test (enum tree_code code
, tree type
, tree op0
, tree op1
)
4757 int or_op
= (code
== TRUTH_ORIF_EXPR
4758 || code
== TRUTH_OR_EXPR
);
4759 int in0_p
, in1_p
, in_p
;
4760 tree low0
, low1
, low
, high0
, high1
, high
;
4761 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
);
4762 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
);
4765 /* If this is an OR operation, invert both sides; we will invert
4766 again at the end. */
4768 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4770 /* If both expressions are the same, if we can merge the ranges, and we
4771 can build the range test, return it or it inverted. If one of the
4772 ranges is always true or always false, consider it to be the same
4773 expression as the other. */
4774 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4775 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4777 && 0 != (tem
= (build_range_check (type
,
4779 : rhs
!= 0 ? rhs
: integer_zero_node
,
4781 return or_op
? invert_truthvalue (tem
) : tem
;
4783 /* On machines where the branch cost is expensive, if this is a
4784 short-circuited branch and the underlying object on both sides
4785 is the same, make a non-short-circuit operation. */
4786 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4787 && lhs
!= 0 && rhs
!= 0
4788 && (code
== TRUTH_ANDIF_EXPR
4789 || code
== TRUTH_ORIF_EXPR
)
4790 && operand_equal_p (lhs
, rhs
, 0))
4792 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4793 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4794 which cases we can't do this. */
4795 if (simple_operand_p (lhs
))
4796 return build2 (code
== TRUTH_ANDIF_EXPR
4797 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4800 else if (lang_hooks
.decls
.global_bindings_p () == 0
4801 && ! CONTAINS_PLACEHOLDER_P (lhs
))
4803 tree common
= save_expr (lhs
);
4805 if (0 != (lhs
= build_range_check (type
, common
,
4806 or_op
? ! in0_p
: in0_p
,
4808 && (0 != (rhs
= build_range_check (type
, common
,
4809 or_op
? ! in1_p
: in1_p
,
4811 return build2 (code
== TRUTH_ANDIF_EXPR
4812 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4820 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4821 bit value. Arrange things so the extra bits will be set to zero if and
4822 only if C is signed-extended to its full width. If MASK is nonzero,
4823 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4826 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4828 tree type
= TREE_TYPE (c
);
4829 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4832 if (p
== modesize
|| unsignedp
)
4835 /* We work by getting just the sign bit into the low-order bit, then
4836 into the high-order bit, then sign-extend. We then XOR that value
4838 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
4839 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
4841 /* We must use a signed type in order to get an arithmetic right shift.
4842 However, we must also avoid introducing accidental overflows, so that
4843 a subsequent call to integer_zerop will work. Hence we must
4844 do the type conversion here. At this point, the constant is either
4845 zero or one, and the conversion to a signed type can never overflow.
4846 We could get an overflow if this conversion is done anywhere else. */
4847 if (TYPE_UNSIGNED (type
))
4848 temp
= fold_convert (lang_hooks
.types
.signed_type (type
), temp
);
4850 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
4851 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
4853 temp
= const_binop (BIT_AND_EXPR
, temp
,
4854 fold_convert (TREE_TYPE (c
), mask
), 0);
4855 /* If necessary, convert the type back to match the type of C. */
4856 if (TYPE_UNSIGNED (type
))
4857 temp
= fold_convert (type
, temp
);
4859 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
4862 /* Find ways of folding logical expressions of LHS and RHS:
4863 Try to merge two comparisons to the same innermost item.
4864 Look for range tests like "ch >= '0' && ch <= '9'".
4865 Look for combinations of simple terms on machines with expensive branches
4866 and evaluate the RHS unconditionally.
4868 For example, if we have p->a == 2 && p->b == 4 and we can make an
4869 object large enough to span both A and B, we can do this with a comparison
4870 against the object ANDed with the a mask.
4872 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4873 operations to do this with one comparison.
4875 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4876 function and the one above.
4878 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4879 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4881 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4884 We return the simplified tree or 0 if no optimization is possible. */
4887 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
4889 /* If this is the "or" of two comparisons, we can do something if
4890 the comparisons are NE_EXPR. If this is the "and", we can do something
4891 if the comparisons are EQ_EXPR. I.e.,
4892 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4894 WANTED_CODE is this operation code. For single bit fields, we can
4895 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4896 comparison for one-bit fields. */
4898 enum tree_code wanted_code
;
4899 enum tree_code lcode
, rcode
;
4900 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
4901 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
4902 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
4903 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
4904 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
4905 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
4906 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
4907 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
4908 enum machine_mode lnmode
, rnmode
;
4909 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
4910 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
4911 tree l_const
, r_const
;
4912 tree lntype
, rntype
, result
;
4913 int first_bit
, end_bit
;
4915 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4916 enum tree_code orig_code
= code
;
4918 /* Start by getting the comparison codes. Fail if anything is volatile.
4919 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4920 it were surrounded with a NE_EXPR. */
4922 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
4925 lcode
= TREE_CODE (lhs
);
4926 rcode
= TREE_CODE (rhs
);
4928 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
4930 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
4931 build_int_cst (TREE_TYPE (lhs
), 0));
4935 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
4937 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
4938 build_int_cst (TREE_TYPE (rhs
), 0));
4942 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
4943 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
4946 ll_arg
= TREE_OPERAND (lhs
, 0);
4947 lr_arg
= TREE_OPERAND (lhs
, 1);
4948 rl_arg
= TREE_OPERAND (rhs
, 0);
4949 rr_arg
= TREE_OPERAND (rhs
, 1);
4951 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4952 if (simple_operand_p (ll_arg
)
4953 && simple_operand_p (lr_arg
))
4956 if (operand_equal_p (ll_arg
, rl_arg
, 0)
4957 && operand_equal_p (lr_arg
, rr_arg
, 0))
4959 result
= combine_comparisons (code
, lcode
, rcode
,
4960 truth_type
, ll_arg
, lr_arg
);
4964 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
4965 && operand_equal_p (lr_arg
, rl_arg
, 0))
4967 result
= combine_comparisons (code
, lcode
,
4968 swap_tree_comparison (rcode
),
4969 truth_type
, ll_arg
, lr_arg
);
4975 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
4976 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
4978 /* If the RHS can be evaluated unconditionally and its operands are
4979 simple, it wins to evaluate the RHS unconditionally on machines
4980 with expensive branches. In this case, this isn't a comparison
4981 that can be merged. Avoid doing this if the RHS is a floating-point
4982 comparison since those can trap. */
4984 if (BRANCH_COST
>= 2
4985 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
4986 && simple_operand_p (rl_arg
)
4987 && simple_operand_p (rr_arg
))
4989 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4990 if (code
== TRUTH_OR_EXPR
4991 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
4992 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
4993 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4994 return build2 (NE_EXPR
, truth_type
,
4995 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4997 build_int_cst (TREE_TYPE (ll_arg
), 0));
4999 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5000 if (code
== TRUTH_AND_EXPR
5001 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5002 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5003 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
5004 return build2 (EQ_EXPR
, truth_type
,
5005 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5007 build_int_cst (TREE_TYPE (ll_arg
), 0));
5009 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
5011 if (code
!= orig_code
|| lhs
!= orig_lhs
|| rhs
!= orig_rhs
)
5012 return build2 (code
, truth_type
, lhs
, rhs
);
5017 /* See if the comparisons can be merged. Then get all the parameters for
5020 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5021 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5025 ll_inner
= decode_field_reference (ll_arg
,
5026 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5027 &ll_unsignedp
, &volatilep
, &ll_mask
,
5029 lr_inner
= decode_field_reference (lr_arg
,
5030 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5031 &lr_unsignedp
, &volatilep
, &lr_mask
,
5033 rl_inner
= decode_field_reference (rl_arg
,
5034 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5035 &rl_unsignedp
, &volatilep
, &rl_mask
,
5037 rr_inner
= decode_field_reference (rr_arg
,
5038 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5039 &rr_unsignedp
, &volatilep
, &rr_mask
,
5042 /* It must be true that the inner operation on the lhs of each
5043 comparison must be the same if we are to be able to do anything.
5044 Then see if we have constants. If not, the same must be true for
5046 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5047 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5050 if (TREE_CODE (lr_arg
) == INTEGER_CST
5051 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5052 l_const
= lr_arg
, r_const
= rr_arg
;
5053 else if (lr_inner
== 0 || rr_inner
== 0
5054 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5057 l_const
= r_const
= 0;
5059 /* If either comparison code is not correct for our logical operation,
5060 fail. However, we can convert a one-bit comparison against zero into
5061 the opposite comparison against that bit being set in the field. */
5063 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5064 if (lcode
!= wanted_code
)
5066 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5068 /* Make the left operand unsigned, since we are only interested
5069 in the value of one bit. Otherwise we are doing the wrong
5078 /* This is analogous to the code for l_const above. */
5079 if (rcode
!= wanted_code
)
5081 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5090 /* See if we can find a mode that contains both fields being compared on
5091 the left. If we can't, fail. Otherwise, update all constants and masks
5092 to be relative to a field of that size. */
5093 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5094 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5095 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5096 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5098 if (lnmode
== VOIDmode
)
5101 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5102 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5103 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5104 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5106 if (BYTES_BIG_ENDIAN
)
5108 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5109 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5112 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, ll_mask
),
5113 size_int (xll_bitpos
), 0);
5114 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, rl_mask
),
5115 size_int (xrl_bitpos
), 0);
5119 l_const
= fold_convert (lntype
, l_const
);
5120 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5121 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
5122 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5123 fold_build1 (BIT_NOT_EXPR
,
5127 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5129 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5134 r_const
= fold_convert (lntype
, r_const
);
5135 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5136 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
5137 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5138 fold_build1 (BIT_NOT_EXPR
,
5142 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5144 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5148 /* If the right sides are not constant, do the same for it. Also,
5149 disallow this optimization if a size or signedness mismatch occurs
5150 between the left and right sides. */
5153 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5154 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5155 /* Make sure the two fields on the right
5156 correspond to the left without being swapped. */
5157 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5160 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5161 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5162 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5163 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5165 if (rnmode
== VOIDmode
)
5168 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5169 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5170 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5171 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5173 if (BYTES_BIG_ENDIAN
)
5175 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5176 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5179 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, lr_mask
),
5180 size_int (xlr_bitpos
), 0);
5181 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, rr_mask
),
5182 size_int (xrr_bitpos
), 0);
5184 /* Make a mask that corresponds to both fields being compared.
5185 Do this for both items being compared. If the operands are the
5186 same size and the bits being compared are in the same position
5187 then we can do this by masking both and comparing the masked
5189 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5190 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
5191 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5193 lhs
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5194 ll_unsignedp
|| rl_unsignedp
);
5195 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5196 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5198 rhs
= make_bit_field_ref (lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5199 lr_unsignedp
|| rr_unsignedp
);
5200 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5201 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5203 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5206 /* There is still another way we can do something: If both pairs of
5207 fields being compared are adjacent, we may be able to make a wider
5208 field containing them both.
5210 Note that we still must mask the lhs/rhs expressions. Furthermore,
5211 the mask must be shifted to account for the shift done by
5212 make_bit_field_ref. */
5213 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5214 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5215 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5216 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5220 lhs
= make_bit_field_ref (ll_inner
, lntype
, ll_bitsize
+ rl_bitsize
,
5221 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5222 rhs
= make_bit_field_ref (lr_inner
, rntype
, lr_bitsize
+ rr_bitsize
,
5223 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5225 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5226 size_int (MIN (xll_bitpos
, xrl_bitpos
)), 0);
5227 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5228 size_int (MIN (xlr_bitpos
, xrr_bitpos
)), 0);
5230 /* Convert to the smaller type before masking out unwanted bits. */
5232 if (lntype
!= rntype
)
5234 if (lnbitsize
> rnbitsize
)
5236 lhs
= fold_convert (rntype
, lhs
);
5237 ll_mask
= fold_convert (rntype
, ll_mask
);
5240 else if (lnbitsize
< rnbitsize
)
5242 rhs
= fold_convert (lntype
, rhs
);
5243 lr_mask
= fold_convert (lntype
, lr_mask
);
5248 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5249 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5251 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5252 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5254 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5260 /* Handle the case of comparisons with constants. If there is something in
5261 common between the masks, those bits of the constants must be the same.
5262 If not, the condition is always false. Test for this to avoid generating
5263 incorrect code below. */
5264 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
5265 if (! integer_zerop (result
)
5266 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
5267 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
5269 if (wanted_code
== NE_EXPR
)
5271 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5272 return constant_boolean_node (true, truth_type
);
5276 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5277 return constant_boolean_node (false, truth_type
);
5281 /* Construct the expression we will return. First get the component
5282 reference we will make. Unless the mask is all ones the width of
5283 that field, perform the mask operation. Then compare with the
5285 result
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5286 ll_unsignedp
|| rl_unsignedp
);
5288 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5289 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5290 result
= build2 (BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5292 return build2 (wanted_code
, truth_type
, result
,
5293 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
5296 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5300 optimize_minmax_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
5303 enum tree_code op_code
;
5304 tree comp_const
= op1
;
5306 int consts_equal
, consts_lt
;
5309 STRIP_SIGN_NOPS (arg0
);
5311 op_code
= TREE_CODE (arg0
);
5312 minmax_const
= TREE_OPERAND (arg0
, 1);
5313 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5314 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5315 inner
= TREE_OPERAND (arg0
, 0);
5317 /* If something does not permit us to optimize, return the original tree. */
5318 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5319 || TREE_CODE (comp_const
) != INTEGER_CST
5320 || TREE_OVERFLOW (comp_const
)
5321 || TREE_CODE (minmax_const
) != INTEGER_CST
5322 || TREE_OVERFLOW (minmax_const
))
5325 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5326 and GT_EXPR, doing the rest with recursive calls using logical
5330 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5332 tree tem
= optimize_minmax_comparison (invert_tree_comparison (code
, false),
5335 return invert_truthvalue (tem
);
5341 fold_build2 (TRUTH_ORIF_EXPR
, type
,
5342 optimize_minmax_comparison
5343 (EQ_EXPR
, type
, arg0
, comp_const
),
5344 optimize_minmax_comparison
5345 (GT_EXPR
, type
, arg0
, comp_const
));
5348 if (op_code
== MAX_EXPR
&& consts_equal
)
5349 /* MAX (X, 0) == 0 -> X <= 0 */
5350 return fold_build2 (LE_EXPR
, type
, inner
, comp_const
);
5352 else if (op_code
== MAX_EXPR
&& consts_lt
)
5353 /* MAX (X, 0) == 5 -> X == 5 */
5354 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5356 else if (op_code
== MAX_EXPR
)
5357 /* MAX (X, 0) == -1 -> false */
5358 return omit_one_operand (type
, integer_zero_node
, inner
);
5360 else if (consts_equal
)
5361 /* MIN (X, 0) == 0 -> X >= 0 */
5362 return fold_build2 (GE_EXPR
, type
, inner
, comp_const
);
5365 /* MIN (X, 0) == 5 -> false */
5366 return omit_one_operand (type
, integer_zero_node
, inner
);
5369 /* MIN (X, 0) == -1 -> X == -1 */
5370 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5373 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5374 /* MAX (X, 0) > 0 -> X > 0
5375 MAX (X, 0) > 5 -> X > 5 */
5376 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5378 else if (op_code
== MAX_EXPR
)
5379 /* MAX (X, 0) > -1 -> true */
5380 return omit_one_operand (type
, integer_one_node
, inner
);
5382 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5383 /* MIN (X, 0) > 0 -> false
5384 MIN (X, 0) > 5 -> false */
5385 return omit_one_operand (type
, integer_zero_node
, inner
);
5388 /* MIN (X, 0) > -1 -> X > -1 */
5389 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5396 /* T is an integer expression that is being multiplied, divided, or taken a
5397 modulus (CODE says which and what kind of divide or modulus) by a
5398 constant C. See if we can eliminate that operation by folding it with
5399 other operations already in T. WIDE_TYPE, if non-null, is a type that
5400 should be used for the computation if wider than our type.
5402 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5403 (X * 2) + (Y * 4). We must, however, be assured that either the original
5404 expression would not overflow or that overflow is undefined for the type
5405 in the language in question.
5407 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5408 the machine has a multiply-accumulate insn or that this is part of an
5409 addressing calculation.
5411 If we return a non-null expression, it is an equivalent form of the
5412 original computation, but need not be in the original type. */
5415 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5417 /* To avoid exponential search depth, refuse to allow recursion past
5418 three levels. Beyond that (1) it's highly unlikely that we'll find
5419 something interesting and (2) we've probably processed it before
5420 when we built the inner expression. */
5429 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
);
5436 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5438 tree type
= TREE_TYPE (t
);
5439 enum tree_code tcode
= TREE_CODE (t
);
5440 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5441 > GET_MODE_SIZE (TYPE_MODE (type
)))
5442 ? wide_type
: type
);
5444 int same_p
= tcode
== code
;
5445 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5447 /* Don't deal with constants of zero here; they confuse the code below. */
5448 if (integer_zerop (c
))
5451 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5452 op0
= TREE_OPERAND (t
, 0);
5454 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5455 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5457 /* Note that we need not handle conditional operations here since fold
5458 already handles those cases. So just do arithmetic here. */
5462 /* For a constant, we can always simplify if we are a multiply
5463 or (for divide and modulus) if it is a multiple of our constant. */
5464 if (code
== MULT_EXPR
5465 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
5466 return const_binop (code
, fold_convert (ctype
, t
),
5467 fold_convert (ctype
, c
), 0);
5470 case CONVERT_EXPR
: case NON_LVALUE_EXPR
: case NOP_EXPR
:
5471 /* If op0 is an expression ... */
5472 if ((COMPARISON_CLASS_P (op0
)
5473 || UNARY_CLASS_P (op0
)
5474 || BINARY_CLASS_P (op0
)
5475 || EXPRESSION_CLASS_P (op0
))
5476 /* ... and is unsigned, and its type is smaller than ctype,
5477 then we cannot pass through as widening. */
5478 && ((TYPE_UNSIGNED (TREE_TYPE (op0
))
5479 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5480 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5481 && (GET_MODE_SIZE (TYPE_MODE (ctype
))
5482 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
)))))
5483 /* ... or this is a truncation (t is narrower than op0),
5484 then we cannot pass through this narrowing. */
5485 || (GET_MODE_SIZE (TYPE_MODE (type
))
5486 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
))))
5487 /* ... or signedness changes for division or modulus,
5488 then we cannot pass through this conversion. */
5489 || (code
!= MULT_EXPR
5490 && (TYPE_UNSIGNED (ctype
)
5491 != TYPE_UNSIGNED (TREE_TYPE (op0
))))))
5494 /* Pass the constant down and see if we can make a simplification. If
5495 we can, replace this expression with the inner simplification for
5496 possible later conversion to our or some other type. */
5497 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5498 && TREE_CODE (t2
) == INTEGER_CST
5499 && !TREE_OVERFLOW (t2
)
5500 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5502 ? ctype
: NULL_TREE
))))
5507 /* If widening the type changes it from signed to unsigned, then we
5508 must avoid building ABS_EXPR itself as unsigned. */
5509 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5511 tree cstype
= (*lang_hooks
.types
.signed_type
) (ctype
);
5512 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
)) != 0)
5514 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5515 return fold_convert (ctype
, t1
);
5521 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5522 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5525 case MIN_EXPR
: case MAX_EXPR
:
5526 /* If widening the type changes the signedness, then we can't perform
5527 this optimization as that changes the result. */
5528 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5531 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5532 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0
5533 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5535 if (tree_int_cst_sgn (c
) < 0)
5536 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5538 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5539 fold_convert (ctype
, t2
));
5543 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5544 /* If the second operand is constant, this is a multiplication
5545 or floor division, by a power of two, so we can treat it that
5546 way unless the multiplier or divisor overflows. Signed
5547 left-shift overflow is implementation-defined rather than
5548 undefined in C90, so do not convert signed left shift into
5550 if (TREE_CODE (op1
) == INTEGER_CST
5551 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5552 /* const_binop may not detect overflow correctly,
5553 so check for it explicitly here. */
5554 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5555 && TREE_INT_CST_HIGH (op1
) == 0
5556 && 0 != (t1
= fold_convert (ctype
,
5557 const_binop (LSHIFT_EXPR
,
5560 && !TREE_OVERFLOW (t1
))
5561 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5562 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5563 ctype
, fold_convert (ctype
, op0
), t1
),
5564 c
, code
, wide_type
);
5567 case PLUS_EXPR
: case MINUS_EXPR
:
5568 /* See if we can eliminate the operation on both sides. If we can, we
5569 can return a new PLUS or MINUS. If we can't, the only remaining
5570 cases where we can do anything are if the second operand is a
5572 t1
= extract_muldiv (op0
, c
, code
, wide_type
);
5573 t2
= extract_muldiv (op1
, c
, code
, wide_type
);
5574 if (t1
!= 0 && t2
!= 0
5575 && (code
== MULT_EXPR
5576 /* If not multiplication, we can only do this if both operands
5577 are divisible by c. */
5578 || (multiple_of_p (ctype
, op0
, c
)
5579 && multiple_of_p (ctype
, op1
, c
))))
5580 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5581 fold_convert (ctype
, t2
));
5583 /* If this was a subtraction, negate OP1 and set it to be an addition.
5584 This simplifies the logic below. */
5585 if (tcode
== MINUS_EXPR
)
5586 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5588 if (TREE_CODE (op1
) != INTEGER_CST
)
5591 /* If either OP1 or C are negative, this optimization is not safe for
5592 some of the division and remainder types while for others we need
5593 to change the code. */
5594 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5596 if (code
== CEIL_DIV_EXPR
)
5597 code
= FLOOR_DIV_EXPR
;
5598 else if (code
== FLOOR_DIV_EXPR
)
5599 code
= CEIL_DIV_EXPR
;
5600 else if (code
!= MULT_EXPR
5601 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5605 /* If it's a multiply or a division/modulus operation of a multiple
5606 of our constant, do the operation and verify it doesn't overflow. */
5607 if (code
== MULT_EXPR
5608 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5610 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5611 fold_convert (ctype
, c
), 0);
5612 /* We allow the constant to overflow with wrapping semantics. */
5614 || (TREE_OVERFLOW (op1
) && ! flag_wrapv
))
5620 /* If we have an unsigned type is not a sizetype, we cannot widen
5621 the operation since it will change the result if the original
5622 computation overflowed. */
5623 if (TYPE_UNSIGNED (ctype
)
5624 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5628 /* If we were able to eliminate our operation from the first side,
5629 apply our operation to the second side and reform the PLUS. */
5630 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5631 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5633 /* The last case is if we are a multiply. In that case, we can
5634 apply the distributive law to commute the multiply and addition
5635 if the multiplication of the constants doesn't overflow. */
5636 if (code
== MULT_EXPR
)
5637 return fold_build2 (tcode
, ctype
,
5638 fold_build2 (code
, ctype
,
5639 fold_convert (ctype
, op0
),
5640 fold_convert (ctype
, c
)),
5646 /* We have a special case here if we are doing something like
5647 (C * 8) % 4 since we know that's zero. */
5648 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5649 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5650 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5651 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5652 return omit_one_operand (type
, integer_zero_node
, op0
);
5654 /* ... fall through ... */
5656 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5657 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5658 /* If we can extract our operation from the LHS, do so and return a
5659 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5660 do something only if the second operand is a constant. */
5662 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5663 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5664 fold_convert (ctype
, op1
));
5665 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5666 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5667 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5668 fold_convert (ctype
, t1
));
5669 else if (TREE_CODE (op1
) != INTEGER_CST
)
5672 /* If these are the same operation types, we can associate them
5673 assuming no overflow. */
5675 && 0 != (t1
= const_binop (MULT_EXPR
, fold_convert (ctype
, op1
),
5676 fold_convert (ctype
, c
), 0))
5677 && !TREE_OVERFLOW (t1
))
5678 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
), t1
);
5680 /* If these operations "cancel" each other, we have the main
5681 optimizations of this pass, which occur when either constant is a
5682 multiple of the other, in which case we replace this with either an
5683 operation or CODE or TCODE.
5685 If we have an unsigned type that is not a sizetype, we cannot do
5686 this since it will change the result if the original computation
5688 if ((! TYPE_UNSIGNED (ctype
)
5689 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5691 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5692 || (tcode
== MULT_EXPR
5693 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5694 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
)))
5696 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5697 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5698 fold_convert (ctype
,
5699 const_binop (TRUNC_DIV_EXPR
,
5701 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
5702 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5703 fold_convert (ctype
,
5704 const_binop (TRUNC_DIV_EXPR
,
5716 /* Return a node which has the indicated constant VALUE (either 0 or
5717 1), and is of the indicated TYPE. */
5720 constant_boolean_node (int value
, tree type
)
5722 if (type
== integer_type_node
)
5723 return value
? integer_one_node
: integer_zero_node
;
5724 else if (type
== boolean_type_node
)
5725 return value
? boolean_true_node
: boolean_false_node
;
5727 return build_int_cst (type
, value
);
5731 /* Return true if expr looks like an ARRAY_REF and set base and
5732 offset to the appropriate trees. If there is no offset,
5733 offset is set to NULL_TREE. Base will be canonicalized to
5734 something you can get the element type from using
5735 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5736 in bytes to the base. */
5739 extract_array_ref (tree expr
, tree
*base
, tree
*offset
)
5741 /* One canonical form is a PLUS_EXPR with the first
5742 argument being an ADDR_EXPR with a possible NOP_EXPR
5744 if (TREE_CODE (expr
) == PLUS_EXPR
)
5746 tree op0
= TREE_OPERAND (expr
, 0);
5747 tree inner_base
, dummy1
;
5748 /* Strip NOP_EXPRs here because the C frontends and/or
5749 folders present us (int *)&x.a + 4B possibly. */
5751 if (extract_array_ref (op0
, &inner_base
, &dummy1
))
5754 if (dummy1
== NULL_TREE
)
5755 *offset
= TREE_OPERAND (expr
, 1);
5757 *offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (expr
),
5758 dummy1
, TREE_OPERAND (expr
, 1));
5762 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5763 which we transform into an ADDR_EXPR with appropriate
5764 offset. For other arguments to the ADDR_EXPR we assume
5765 zero offset and as such do not care about the ADDR_EXPR
5766 type and strip possible nops from it. */
5767 else if (TREE_CODE (expr
) == ADDR_EXPR
)
5769 tree op0
= TREE_OPERAND (expr
, 0);
5770 if (TREE_CODE (op0
) == ARRAY_REF
)
5772 tree idx
= TREE_OPERAND (op0
, 1);
5773 *base
= TREE_OPERAND (op0
, 0);
5774 *offset
= fold_build2 (MULT_EXPR
, TREE_TYPE (idx
), idx
,
5775 array_ref_element_size (op0
));
5779 /* Handle array-to-pointer decay as &a. */
5780 if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
5781 *base
= TREE_OPERAND (expr
, 0);
5784 *offset
= NULL_TREE
;
5788 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5789 else if (SSA_VAR_P (expr
)
5790 && TREE_CODE (TREE_TYPE (expr
)) == POINTER_TYPE
)
5793 *offset
= NULL_TREE
;
5801 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5802 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5803 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5804 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5805 COND is the first argument to CODE; otherwise (as in the example
5806 given here), it is the second argument. TYPE is the type of the
5807 original expression. Return NULL_TREE if no simplification is
5811 fold_binary_op_with_conditional_arg (enum tree_code code
,
5812 tree type
, tree op0
, tree op1
,
5813 tree cond
, tree arg
, int cond_first_p
)
5815 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5816 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5817 tree test
, true_value
, false_value
;
5818 tree lhs
= NULL_TREE
;
5819 tree rhs
= NULL_TREE
;
5821 /* This transformation is only worthwhile if we don't have to wrap
5822 arg in a SAVE_EXPR, and the operation can be simplified on at least
5823 one of the branches once its pushed inside the COND_EXPR. */
5824 if (!TREE_CONSTANT (arg
))
5827 if (TREE_CODE (cond
) == COND_EXPR
)
5829 test
= TREE_OPERAND (cond
, 0);
5830 true_value
= TREE_OPERAND (cond
, 1);
5831 false_value
= TREE_OPERAND (cond
, 2);
5832 /* If this operand throws an expression, then it does not make
5833 sense to try to perform a logical or arithmetic operation
5835 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5837 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
5842 tree testtype
= TREE_TYPE (cond
);
5844 true_value
= constant_boolean_node (true, testtype
);
5845 false_value
= constant_boolean_node (false, testtype
);
5848 arg
= fold_convert (arg_type
, arg
);
5851 true_value
= fold_convert (cond_type
, true_value
);
5853 lhs
= fold_build2 (code
, type
, true_value
, arg
);
5855 lhs
= fold_build2 (code
, type
, arg
, true_value
);
5859 false_value
= fold_convert (cond_type
, false_value
);
5861 rhs
= fold_build2 (code
, type
, false_value
, arg
);
5863 rhs
= fold_build2 (code
, type
, arg
, false_value
);
5866 test
= fold_build3 (COND_EXPR
, type
, test
, lhs
, rhs
);
5867 return fold_convert (type
, test
);
5871 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5873 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5874 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5875 ADDEND is the same as X.
5877 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5878 and finite. The problematic cases are when X is zero, and its mode
5879 has signed zeros. In the case of rounding towards -infinity,
5880 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5881 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5884 fold_real_zero_addition_p (tree type
, tree addend
, int negate
)
5886 if (!real_zerop (addend
))
5889 /* Don't allow the fold with -fsignaling-nans. */
5890 if (HONOR_SNANS (TYPE_MODE (type
)))
5893 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5894 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
5897 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5898 if (TREE_CODE (addend
) == REAL_CST
5899 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
5902 /* The mode has signed zeros, and we have to honor their sign.
5903 In this situation, there is only one case we can return true for.
5904 X - 0 is the same as X unless rounding towards -infinity is
5906 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
5909 /* Subroutine of fold() that checks comparisons of built-in math
5910 functions against real constants.
5912 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5913 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5914 is the type of the result and ARG0 and ARG1 are the operands of the
5915 comparison. ARG1 must be a TREE_REAL_CST.
5917 The function returns the constant folded tree if a simplification
5918 can be made, and NULL_TREE otherwise. */
5921 fold_mathfn_compare (enum built_in_function fcode
, enum tree_code code
,
5922 tree type
, tree arg0
, tree arg1
)
5926 if (BUILTIN_SQRT_P (fcode
))
5928 tree arg
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
5929 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
5931 c
= TREE_REAL_CST (arg1
);
5932 if (REAL_VALUE_NEGATIVE (c
))
5934 /* sqrt(x) < y is always false, if y is negative. */
5935 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
5936 return omit_one_operand (type
, integer_zero_node
, arg
);
5938 /* sqrt(x) > y is always true, if y is negative and we
5939 don't care about NaNs, i.e. negative values of x. */
5940 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
5941 return omit_one_operand (type
, integer_one_node
, arg
);
5943 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5944 return fold_build2 (GE_EXPR
, type
, arg
,
5945 build_real (TREE_TYPE (arg
), dconst0
));
5947 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
5951 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5952 real_convert (&c2
, mode
, &c2
);
5954 if (REAL_VALUE_ISINF (c2
))
5956 /* sqrt(x) > y is x == +Inf, when y is very large. */
5957 if (HONOR_INFINITIES (mode
))
5958 return fold_build2 (EQ_EXPR
, type
, arg
,
5959 build_real (TREE_TYPE (arg
), c2
));
5961 /* sqrt(x) > y is always false, when y is very large
5962 and we don't care about infinities. */
5963 return omit_one_operand (type
, integer_zero_node
, arg
);
5966 /* sqrt(x) > c is the same as x > c*c. */
5967 return fold_build2 (code
, type
, arg
,
5968 build_real (TREE_TYPE (arg
), c2
));
5970 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
5974 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5975 real_convert (&c2
, mode
, &c2
);
5977 if (REAL_VALUE_ISINF (c2
))
5979 /* sqrt(x) < y is always true, when y is a very large
5980 value and we don't care about NaNs or Infinities. */
5981 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
5982 return omit_one_operand (type
, integer_one_node
, arg
);
5984 /* sqrt(x) < y is x != +Inf when y is very large and we
5985 don't care about NaNs. */
5986 if (! HONOR_NANS (mode
))
5987 return fold_build2 (NE_EXPR
, type
, arg
,
5988 build_real (TREE_TYPE (arg
), c2
));
5990 /* sqrt(x) < y is x >= 0 when y is very large and we
5991 don't care about Infinities. */
5992 if (! HONOR_INFINITIES (mode
))
5993 return fold_build2 (GE_EXPR
, type
, arg
,
5994 build_real (TREE_TYPE (arg
), dconst0
));
5996 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5997 if (lang_hooks
.decls
.global_bindings_p () != 0
5998 || CONTAINS_PLACEHOLDER_P (arg
))
6001 arg
= save_expr (arg
);
6002 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
6003 fold_build2 (GE_EXPR
, type
, arg
,
6004 build_real (TREE_TYPE (arg
),
6006 fold_build2 (NE_EXPR
, type
, arg
,
6007 build_real (TREE_TYPE (arg
),
6011 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6012 if (! HONOR_NANS (mode
))
6013 return fold_build2 (code
, type
, arg
,
6014 build_real (TREE_TYPE (arg
), c2
));
6016 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6017 if (lang_hooks
.decls
.global_bindings_p () == 0
6018 && ! CONTAINS_PLACEHOLDER_P (arg
))
6020 arg
= save_expr (arg
);
6021 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
6022 fold_build2 (GE_EXPR
, type
, arg
,
6023 build_real (TREE_TYPE (arg
),
6025 fold_build2 (code
, type
, arg
,
6026 build_real (TREE_TYPE (arg
),
6035 /* Subroutine of fold() that optimizes comparisons against Infinities,
6036 either +Inf or -Inf.
6038 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6039 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6040 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6042 The function returns the constant folded tree if a simplification
6043 can be made, and NULL_TREE otherwise. */
6046 fold_inf_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6048 enum machine_mode mode
;
6049 REAL_VALUE_TYPE max
;
6053 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6055 /* For negative infinity swap the sense of the comparison. */
6056 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6058 code
= swap_tree_comparison (code
);
6063 /* x > +Inf is always false, if with ignore sNANs. */
6064 if (HONOR_SNANS (mode
))
6066 return omit_one_operand (type
, integer_zero_node
, arg0
);
6069 /* x <= +Inf is always true, if we don't case about NaNs. */
6070 if (! HONOR_NANS (mode
))
6071 return omit_one_operand (type
, integer_one_node
, arg0
);
6073 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6074 if (lang_hooks
.decls
.global_bindings_p () == 0
6075 && ! CONTAINS_PLACEHOLDER_P (arg0
))
6077 arg0
= save_expr (arg0
);
6078 return fold_build2 (EQ_EXPR
, type
, arg0
, arg0
);
6084 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6085 real_maxval (&max
, neg
, mode
);
6086 return fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
6087 arg0
, build_real (TREE_TYPE (arg0
), max
));
6090 /* x < +Inf is always equal to x <= DBL_MAX. */
6091 real_maxval (&max
, neg
, mode
);
6092 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
6093 arg0
, build_real (TREE_TYPE (arg0
), max
));
6096 /* x != +Inf is always equal to !(x > DBL_MAX). */
6097 real_maxval (&max
, neg
, mode
);
6098 if (! HONOR_NANS (mode
))
6099 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
6100 arg0
, build_real (TREE_TYPE (arg0
), max
));
6102 temp
= fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
6103 arg0
, build_real (TREE_TYPE (arg0
), max
));
6104 return fold_build1 (TRUTH_NOT_EXPR
, type
, temp
);
6113 /* Subroutine of fold() that optimizes comparisons of a division by
6114 a nonzero integer constant against an integer constant, i.e.
6117 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6118 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6119 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6121 The function returns the constant folded tree if a simplification
6122 can be made, and NULL_TREE otherwise. */
6125 fold_div_compare (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6127 tree prod
, tmp
, hi
, lo
;
6128 tree arg00
= TREE_OPERAND (arg0
, 0);
6129 tree arg01
= TREE_OPERAND (arg0
, 1);
6130 unsigned HOST_WIDE_INT lpart
;
6131 HOST_WIDE_INT hpart
;
6132 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (arg0
));
6136 /* We have to do this the hard way to detect unsigned overflow.
6137 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6138 overflow
= mul_double_with_sign (TREE_INT_CST_LOW (arg01
),
6139 TREE_INT_CST_HIGH (arg01
),
6140 TREE_INT_CST_LOW (arg1
),
6141 TREE_INT_CST_HIGH (arg1
),
6142 &lpart
, &hpart
, unsigned_p
);
6143 prod
= force_fit_type_double (TREE_TYPE (arg00
), lpart
, hpart
,
6145 neg_overflow
= false;
6149 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6150 build_int_cst (TREE_TYPE (arg01
), 1), 0);
6153 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6154 overflow
= add_double_with_sign (TREE_INT_CST_LOW (prod
),
6155 TREE_INT_CST_HIGH (prod
),
6156 TREE_INT_CST_LOW (tmp
),
6157 TREE_INT_CST_HIGH (tmp
),
6158 &lpart
, &hpart
, unsigned_p
);
6159 hi
= force_fit_type_double (TREE_TYPE (arg00
), lpart
, hpart
,
6160 -1, overflow
| TREE_OVERFLOW (prod
));
6162 else if (tree_int_cst_sgn (arg01
) >= 0)
6164 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6165 build_int_cst (TREE_TYPE (arg01
), 1), 0);
6166 switch (tree_int_cst_sgn (arg1
))
6169 neg_overflow
= true;
6170 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6175 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6180 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6190 /* A negative divisor reverses the relational operators. */
6191 code
= swap_tree_comparison (code
);
6193 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6194 build_int_cst (TREE_TYPE (arg01
), 1), 0);
6195 switch (tree_int_cst_sgn (arg1
))
6198 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6203 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6208 neg_overflow
= true;
6209 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6221 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6222 return omit_one_operand (type
, integer_zero_node
, arg00
);
6223 if (TREE_OVERFLOW (hi
))
6224 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6225 if (TREE_OVERFLOW (lo
))
6226 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6227 return build_range_check (type
, arg00
, 1, lo
, hi
);
6230 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6231 return omit_one_operand (type
, integer_one_node
, arg00
);
6232 if (TREE_OVERFLOW (hi
))
6233 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6234 if (TREE_OVERFLOW (lo
))
6235 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6236 return build_range_check (type
, arg00
, 0, lo
, hi
);
6239 if (TREE_OVERFLOW (lo
))
6241 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6242 return omit_one_operand (type
, tmp
, arg00
);
6244 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6247 if (TREE_OVERFLOW (hi
))
6249 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6250 return omit_one_operand (type
, tmp
, arg00
);
6252 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6255 if (TREE_OVERFLOW (hi
))
6257 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6258 return omit_one_operand (type
, tmp
, arg00
);
6260 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6263 if (TREE_OVERFLOW (lo
))
6265 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6266 return omit_one_operand (type
, tmp
, arg00
);
6268 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6278 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6279 equality/inequality test, then return a simplified form of the test
6280 using a sign testing. Otherwise return NULL. TYPE is the desired
6284 fold_single_bit_test_into_sign_test (enum tree_code code
, tree arg0
, tree arg1
,
6287 /* If this is testing a single bit, we can optimize the test. */
6288 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6289 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6290 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6292 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6293 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6294 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6296 if (arg00
!= NULL_TREE
6297 /* This is only a win if casting to a signed type is cheap,
6298 i.e. when arg00's type is not a partial mode. */
6299 && TYPE_PRECISION (TREE_TYPE (arg00
))
6300 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6302 tree stype
= lang_hooks
.types
.signed_type (TREE_TYPE (arg00
));
6303 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6304 result_type
, fold_convert (stype
, arg00
),
6305 build_int_cst (stype
, 0));
6312 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6313 equality/inequality test, then return a simplified form of
6314 the test using shifts and logical operations. Otherwise return
6315 NULL. TYPE is the desired result type. */
6318 fold_single_bit_test (enum tree_code code
, tree arg0
, tree arg1
,
6321 /* If this is testing a single bit, we can optimize the test. */
6322 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6323 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6324 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6326 tree inner
= TREE_OPERAND (arg0
, 0);
6327 tree type
= TREE_TYPE (arg0
);
6328 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6329 enum machine_mode operand_mode
= TYPE_MODE (type
);
6331 tree signed_type
, unsigned_type
, intermediate_type
;
6334 /* First, see if we can fold the single bit test into a sign-bit
6336 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
,
6341 /* Otherwise we have (A & C) != 0 where C is a single bit,
6342 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6343 Similarly for (A & C) == 0. */
6345 /* If INNER is a right shift of a constant and it plus BITNUM does
6346 not overflow, adjust BITNUM and INNER. */
6347 if (TREE_CODE (inner
) == RSHIFT_EXPR
6348 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6349 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6350 && bitnum
< TYPE_PRECISION (type
)
6351 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6352 bitnum
- TYPE_PRECISION (type
)))
6354 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6355 inner
= TREE_OPERAND (inner
, 0);
6358 /* If we are going to be able to omit the AND below, we must do our
6359 operations as unsigned. If we must use the AND, we have a choice.
6360 Normally unsigned is faster, but for some machines signed is. */
6361 #ifdef LOAD_EXTEND_OP
6362 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6363 && !flag_syntax_only
) ? 0 : 1;
6368 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6369 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6370 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6371 inner
= fold_convert (intermediate_type
, inner
);
6374 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6375 inner
, size_int (bitnum
));
6377 one
= build_int_cst (intermediate_type
, 1);
6379 if (code
== EQ_EXPR
)
6380 inner
= fold_build2 (BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6382 /* Put the AND last so it can combine with more things. */
6383 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6385 /* Make sure to return the proper type. */
6386 inner
= fold_convert (result_type
, inner
);
6393 /* Check whether we are allowed to reorder operands arg0 and arg1,
6394 such that the evaluation of arg1 occurs before arg0. */
6397 reorder_operands_p (tree arg0
, tree arg1
)
6399 if (! flag_evaluation_order
)
6401 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6403 return ! TREE_SIDE_EFFECTS (arg0
)
6404 && ! TREE_SIDE_EFFECTS (arg1
);
6407 /* Test whether it is preferable two swap two operands, ARG0 and
6408 ARG1, for example because ARG0 is an integer constant and ARG1
6409 isn't. If REORDER is true, only recommend swapping if we can
6410 evaluate the operands in reverse order. */
6413 tree_swap_operands_p (tree arg0
, tree arg1
, bool reorder
)
6415 STRIP_SIGN_NOPS (arg0
);
6416 STRIP_SIGN_NOPS (arg1
);
6418 if (TREE_CODE (arg1
) == INTEGER_CST
)
6420 if (TREE_CODE (arg0
) == INTEGER_CST
)
6423 if (TREE_CODE (arg1
) == REAL_CST
)
6425 if (TREE_CODE (arg0
) == REAL_CST
)
6428 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6430 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6433 if (TREE_CONSTANT (arg1
))
6435 if (TREE_CONSTANT (arg0
))
6441 if (reorder
&& flag_evaluation_order
6442 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6450 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6451 for commutative and comparison operators. Ensuring a canonical
6452 form allows the optimizers to find additional redundancies without
6453 having to explicitly check for both orderings. */
6454 if (TREE_CODE (arg0
) == SSA_NAME
6455 && TREE_CODE (arg1
) == SSA_NAME
6456 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6462 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6463 ARG0 is extended to a wider type. */
6466 fold_widened_comparison (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6468 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6470 tree shorter_type
, outer_type
;
6474 if (arg0_unw
== arg0
)
6476 shorter_type
= TREE_TYPE (arg0_unw
);
6478 #ifdef HAVE_canonicalize_funcptr_for_compare
6479 /* Disable this optimization if we're casting a function pointer
6480 type on targets that require function pointer canonicalization. */
6481 if (HAVE_canonicalize_funcptr_for_compare
6482 && TREE_CODE (shorter_type
) == POINTER_TYPE
6483 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6487 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6490 arg1_unw
= get_unwidened (arg1
, shorter_type
);
6492 /* If possible, express the comparison in the shorter mode. */
6493 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6494 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6495 && (TREE_TYPE (arg1_unw
) == shorter_type
6496 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6497 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6498 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6499 && int_fits_type_p (arg1_unw
, shorter_type
))))
6500 return fold_build2 (code
, type
, arg0_unw
,
6501 fold_convert (shorter_type
, arg1_unw
));
6503 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6504 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6505 || !int_fits_type_p (arg1_unw
, shorter_type
))
6508 /* If we are comparing with the integer that does not fit into the range
6509 of the shorter type, the result is known. */
6510 outer_type
= TREE_TYPE (arg1_unw
);
6511 min
= lower_bound_in_type (outer_type
, shorter_type
);
6512 max
= upper_bound_in_type (outer_type
, shorter_type
);
6514 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6516 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6523 return omit_one_operand (type
, integer_zero_node
, arg0
);
6528 return omit_one_operand (type
, integer_one_node
, arg0
);
6534 return omit_one_operand (type
, integer_one_node
, arg0
);
6536 return omit_one_operand (type
, integer_zero_node
, arg0
);
6541 return omit_one_operand (type
, integer_zero_node
, arg0
);
6543 return omit_one_operand (type
, integer_one_node
, arg0
);
6552 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6553 ARG0 just the signedness is changed. */
6556 fold_sign_changed_comparison (enum tree_code code
, tree type
,
6557 tree arg0
, tree arg1
)
6560 tree inner_type
, outer_type
;
6562 if (TREE_CODE (arg0
) != NOP_EXPR
6563 && TREE_CODE (arg0
) != CONVERT_EXPR
)
6566 outer_type
= TREE_TYPE (arg0
);
6567 arg0_inner
= TREE_OPERAND (arg0
, 0);
6568 inner_type
= TREE_TYPE (arg0_inner
);
6570 #ifdef HAVE_canonicalize_funcptr_for_compare
6571 /* Disable this optimization if we're casting a function pointer
6572 type on targets that require function pointer canonicalization. */
6573 if (HAVE_canonicalize_funcptr_for_compare
6574 && TREE_CODE (inner_type
) == POINTER_TYPE
6575 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6579 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6582 if (TREE_CODE (arg1
) != INTEGER_CST
6583 && !((TREE_CODE (arg1
) == NOP_EXPR
6584 || TREE_CODE (arg1
) == CONVERT_EXPR
)
6585 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6588 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6593 if (TREE_CODE (arg1
) == INTEGER_CST
)
6594 arg1
= force_fit_type_double (inner_type
, TREE_INT_CST_LOW (arg1
),
6595 TREE_INT_CST_HIGH (arg1
), 0,
6596 TREE_OVERFLOW (arg1
));
6598 arg1
= fold_convert (inner_type
, arg1
);
6600 return fold_build2 (code
, type
, arg0_inner
, arg1
);
6603 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6604 step of the array. Reconstructs s and delta in the case of s * delta
6605 being an integer constant (and thus already folded).
6606 ADDR is the address. MULT is the multiplicative expression.
6607 If the function succeeds, the new address expression is returned. Otherwise
6608 NULL_TREE is returned. */
6611 try_move_mult_to_index (enum tree_code code
, tree addr
, tree op1
)
6613 tree s
, delta
, step
;
6614 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6618 /* Canonicalize op1 into a possibly non-constant delta
6619 and an INTEGER_CST s. */
6620 if (TREE_CODE (op1
) == MULT_EXPR
)
6622 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6627 if (TREE_CODE (arg0
) == INTEGER_CST
)
6632 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6640 else if (TREE_CODE (op1
) == INTEGER_CST
)
6647 /* Simulate we are delta * 1. */
6649 s
= integer_one_node
;
6652 for (;; ref
= TREE_OPERAND (ref
, 0))
6654 if (TREE_CODE (ref
) == ARRAY_REF
)
6656 itype
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6660 step
= array_ref_element_size (ref
);
6661 if (TREE_CODE (step
) != INTEGER_CST
)
6666 if (! tree_int_cst_equal (step
, s
))
6671 /* Try if delta is a multiple of step. */
6672 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, delta
, step
);
6681 if (!handled_component_p (ref
))
6685 /* We found the suitable array reference. So copy everything up to it,
6686 and replace the index. */
6688 pref
= TREE_OPERAND (addr
, 0);
6689 ret
= copy_node (pref
);
6694 pref
= TREE_OPERAND (pref
, 0);
6695 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6696 pos
= TREE_OPERAND (pos
, 0);
6699 TREE_OPERAND (pos
, 1) = fold_build2 (code
, itype
,
6700 fold_convert (itype
,
6701 TREE_OPERAND (pos
, 1)),
6702 fold_convert (itype
, delta
));
6704 return fold_build1 (ADDR_EXPR
, TREE_TYPE (addr
), ret
);
6708 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6709 means A >= Y && A != MAX, but in this case we know that
6710 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6713 fold_to_nonsharp_ineq_using_bound (tree ineq
, tree bound
)
6715 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6717 if (TREE_CODE (bound
) == LT_EXPR
)
6718 a
= TREE_OPERAND (bound
, 0);
6719 else if (TREE_CODE (bound
) == GT_EXPR
)
6720 a
= TREE_OPERAND (bound
, 1);
6724 typea
= TREE_TYPE (a
);
6725 if (!INTEGRAL_TYPE_P (typea
)
6726 && !POINTER_TYPE_P (typea
))
6729 if (TREE_CODE (ineq
) == LT_EXPR
)
6731 a1
= TREE_OPERAND (ineq
, 1);
6732 y
= TREE_OPERAND (ineq
, 0);
6734 else if (TREE_CODE (ineq
) == GT_EXPR
)
6736 a1
= TREE_OPERAND (ineq
, 0);
6737 y
= TREE_OPERAND (ineq
, 1);
6742 if (TREE_TYPE (a1
) != typea
)
6745 diff
= fold_build2 (MINUS_EXPR
, typea
, a1
, a
);
6746 if (!integer_onep (diff
))
6749 return fold_build2 (GE_EXPR
, type
, a
, y
);
6752 /* Fold a sum or difference of at least one multiplication.
6753 Returns the folded tree or NULL if no simplification could be made. */
6756 fold_plusminus_mult_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6758 tree arg00
, arg01
, arg10
, arg11
;
6759 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6761 /* (A * C) +- (B * C) -> (A+-B) * C.
6762 (A * C) +- A -> A * (C+-1).
6763 We are most concerned about the case where C is a constant,
6764 but other combinations show up during loop reduction. Since
6765 it is not difficult, try all four possibilities. */
6767 if (TREE_CODE (arg0
) == MULT_EXPR
)
6769 arg00
= TREE_OPERAND (arg0
, 0);
6770 arg01
= TREE_OPERAND (arg0
, 1);
6775 arg01
= build_one_cst (type
);
6777 if (TREE_CODE (arg1
) == MULT_EXPR
)
6779 arg10
= TREE_OPERAND (arg1
, 0);
6780 arg11
= TREE_OPERAND (arg1
, 1);
6785 arg11
= build_one_cst (type
);
6789 if (operand_equal_p (arg01
, arg11
, 0))
6790 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6791 else if (operand_equal_p (arg00
, arg10
, 0))
6792 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6793 else if (operand_equal_p (arg00
, arg11
, 0))
6794 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6795 else if (operand_equal_p (arg01
, arg10
, 0))
6796 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6798 /* No identical multiplicands; see if we can find a common
6799 power-of-two factor in non-power-of-two multiplies. This
6800 can help in multi-dimensional array access. */
6801 else if (host_integerp (arg01
, 0)
6802 && host_integerp (arg11
, 0))
6804 HOST_WIDE_INT int01
, int11
, tmp
;
6807 int01
= TREE_INT_CST_LOW (arg01
);
6808 int11
= TREE_INT_CST_LOW (arg11
);
6810 /* Move min of absolute values to int11. */
6811 if ((int01
>= 0 ? int01
: -int01
)
6812 < (int11
>= 0 ? int11
: -int11
))
6814 tmp
= int01
, int01
= int11
, int11
= tmp
;
6815 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6822 if (exact_log2 (abs (int11
)) > 0 && int01
% int11
== 0)
6824 alt0
= fold_build2 (MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6825 build_int_cst (TREE_TYPE (arg00
),
6830 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
6835 return fold_build2 (MULT_EXPR
, type
,
6836 fold_build2 (code
, type
,
6837 fold_convert (type
, alt0
),
6838 fold_convert (type
, alt1
)),
6839 fold_convert (type
, same
));
6844 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6845 specified by EXPR into the buffer PTR of length LEN bytes.
6846 Return the number of bytes placed in the buffer, or zero
6850 native_encode_int (tree expr
, unsigned char *ptr
, int len
)
6852 tree type
= TREE_TYPE (expr
);
6853 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
6854 int byte
, offset
, word
, words
;
6855 unsigned char value
;
6857 if (total_bytes
> len
)
6859 words
= total_bytes
/ UNITS_PER_WORD
;
6861 for (byte
= 0; byte
< total_bytes
; byte
++)
6863 int bitpos
= byte
* BITS_PER_UNIT
;
6864 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
6865 value
= (unsigned char) (TREE_INT_CST_LOW (expr
) >> bitpos
);
6867 value
= (unsigned char) (TREE_INT_CST_HIGH (expr
)
6868 >> (bitpos
- HOST_BITS_PER_WIDE_INT
));
6870 if (total_bytes
> UNITS_PER_WORD
)
6872 word
= byte
/ UNITS_PER_WORD
;
6873 if (WORDS_BIG_ENDIAN
)
6874 word
= (words
- 1) - word
;
6875 offset
= word
* UNITS_PER_WORD
;
6876 if (BYTES_BIG_ENDIAN
)
6877 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
6879 offset
+= byte
% UNITS_PER_WORD
;
6882 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
6883 ptr
[offset
] = value
;
6889 /* Subroutine of native_encode_expr. Encode the REAL_CST
6890 specified by EXPR into the buffer PTR of length LEN bytes.
6891 Return the number of bytes placed in the buffer, or zero
6895 native_encode_real (tree expr
, unsigned char *ptr
, int len
)
6897 tree type
= TREE_TYPE (expr
);
6898 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
6899 int byte
, offset
, word
, words
;
6900 unsigned char value
;
6902 /* There are always 32 bits in each long, no matter the size of
6903 the hosts long. We handle floating point representations with
6907 if (total_bytes
> len
)
6909 words
= total_bytes
/ UNITS_PER_WORD
;
6911 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
6913 for (byte
= 0; byte
< total_bytes
; byte
++)
6915 int bitpos
= byte
* BITS_PER_UNIT
;
6916 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
6918 if (total_bytes
> UNITS_PER_WORD
)
6920 word
= byte
/ UNITS_PER_WORD
;
6921 if (FLOAT_WORDS_BIG_ENDIAN
)
6922 word
= (words
- 1) - word
;
6923 offset
= word
* UNITS_PER_WORD
;
6924 if (BYTES_BIG_ENDIAN
)
6925 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
6927 offset
+= byte
% UNITS_PER_WORD
;
6930 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
6931 ptr
[offset
] = value
;
6936 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6937 specified by EXPR into the buffer PTR of length LEN bytes.
6938 Return the number of bytes placed in the buffer, or zero
6942 native_encode_complex (tree expr
, unsigned char *ptr
, int len
)
6947 part
= TREE_REALPART (expr
);
6948 rsize
= native_encode_expr (part
, ptr
, len
);
6951 part
= TREE_IMAGPART (expr
);
6952 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
);
6955 return rsize
+ isize
;
6959 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6960 specified by EXPR into the buffer PTR of length LEN bytes.
6961 Return the number of bytes placed in the buffer, or zero
6965 native_encode_vector (tree expr
, unsigned char *ptr
, int len
)
6967 int i
, size
, offset
, count
;
6968 tree itype
, elem
, elements
;
6971 elements
= TREE_VECTOR_CST_ELTS (expr
);
6972 count
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
));
6973 itype
= TREE_TYPE (TREE_TYPE (expr
));
6974 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
6975 for (i
= 0; i
< count
; i
++)
6979 elem
= TREE_VALUE (elements
);
6980 elements
= TREE_CHAIN (elements
);
6987 if (native_encode_expr (elem
, ptr
+offset
, len
-offset
) != size
)
6992 if (offset
+ size
> len
)
6994 memset (ptr
+offset
, 0, size
);
7002 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7003 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7004 buffer PTR of length LEN bytes. Return the number of bytes
7005 placed in the buffer, or zero upon failure. */
7008 native_encode_expr (tree expr
, unsigned char *ptr
, int len
)
7010 switch (TREE_CODE (expr
))
7013 return native_encode_int (expr
, ptr
, len
);
7016 return native_encode_real (expr
, ptr
, len
);
7019 return native_encode_complex (expr
, ptr
, len
);
7022 return native_encode_vector (expr
, ptr
, len
);
7030 /* Subroutine of native_interpret_expr. Interpret the contents of
7031 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7032 If the buffer cannot be interpreted, return NULL_TREE. */
7035 native_interpret_int (tree type
, unsigned char *ptr
, int len
)
7037 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7038 int byte
, offset
, word
, words
;
7039 unsigned char value
;
7040 unsigned int HOST_WIDE_INT lo
= 0;
7041 HOST_WIDE_INT hi
= 0;
7043 if (total_bytes
> len
)
7045 if (total_bytes
* BITS_PER_UNIT
> 2 * HOST_BITS_PER_WIDE_INT
)
7047 words
= total_bytes
/ UNITS_PER_WORD
;
7049 for (byte
= 0; byte
< total_bytes
; byte
++)
7051 int bitpos
= byte
* BITS_PER_UNIT
;
7052 if (total_bytes
> UNITS_PER_WORD
)
7054 word
= byte
/ UNITS_PER_WORD
;
7055 if (WORDS_BIG_ENDIAN
)
7056 word
= (words
- 1) - word
;
7057 offset
= word
* UNITS_PER_WORD
;
7058 if (BYTES_BIG_ENDIAN
)
7059 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7061 offset
+= byte
% UNITS_PER_WORD
;
7064 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7065 value
= ptr
[offset
];
7067 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7068 lo
|= (unsigned HOST_WIDE_INT
) value
<< bitpos
;
7070 hi
|= (unsigned HOST_WIDE_INT
) value
7071 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
7074 return build_int_cst_wide_type (type
, lo
, hi
);
7078 /* Subroutine of native_interpret_expr. Interpret the contents of
7079 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7080 If the buffer cannot be interpreted, return NULL_TREE. */
7083 native_interpret_real (tree type
, unsigned char *ptr
, int len
)
7085 enum machine_mode mode
= TYPE_MODE (type
);
7086 int total_bytes
= GET_MODE_SIZE (mode
);
7087 int byte
, offset
, word
, words
;
7088 unsigned char value
;
7089 /* There are always 32 bits in each long, no matter the size of
7090 the hosts long. We handle floating point representations with
7095 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7096 if (total_bytes
> len
|| total_bytes
> 24)
7098 words
= total_bytes
/ UNITS_PER_WORD
;
7100 memset (tmp
, 0, sizeof (tmp
));
7101 for (byte
= 0; byte
< total_bytes
; byte
++)
7103 int bitpos
= byte
* BITS_PER_UNIT
;
7104 if (total_bytes
> UNITS_PER_WORD
)
7106 word
= byte
/ UNITS_PER_WORD
;
7107 if (FLOAT_WORDS_BIG_ENDIAN
)
7108 word
= (words
- 1) - word
;
7109 offset
= word
* UNITS_PER_WORD
;
7110 if (BYTES_BIG_ENDIAN
)
7111 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7113 offset
+= byte
% UNITS_PER_WORD
;
7116 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7117 value
= ptr
[offset
];
7119 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7122 real_from_target (&r
, tmp
, mode
);
7123 return build_real (type
, r
);
7127 /* Subroutine of native_interpret_expr. Interpret the contents of
7128 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7129 If the buffer cannot be interpreted, return NULL_TREE. */
7132 native_interpret_complex (tree type
, unsigned char *ptr
, int len
)
7134 tree etype
, rpart
, ipart
;
7137 etype
= TREE_TYPE (type
);
7138 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7141 rpart
= native_interpret_expr (etype
, ptr
, size
);
7144 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7147 return build_complex (type
, rpart
, ipart
);
7151 /* Subroutine of native_interpret_expr. Interpret the contents of
7152 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7153 If the buffer cannot be interpreted, return NULL_TREE. */
7156 native_interpret_vector (tree type
, unsigned char *ptr
, int len
)
7158 tree etype
, elem
, elements
;
7161 etype
= TREE_TYPE (type
);
7162 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7163 count
= TYPE_VECTOR_SUBPARTS (type
);
7164 if (size
* count
> len
)
7167 elements
= NULL_TREE
;
7168 for (i
= count
- 1; i
>= 0; i
--)
7170 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7173 elements
= tree_cons (NULL_TREE
, elem
, elements
);
7175 return build_vector (type
, elements
);
7179 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7180 the buffer PTR of length LEN as a constant of type TYPE. For
7181 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7182 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7183 return NULL_TREE. */
7186 native_interpret_expr (tree type
, unsigned char *ptr
, int len
)
7188 switch (TREE_CODE (type
))
7193 return native_interpret_int (type
, ptr
, len
);
7196 return native_interpret_real (type
, ptr
, len
);
7199 return native_interpret_complex (type
, ptr
, len
);
7202 return native_interpret_vector (type
, ptr
, len
);
7210 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7211 TYPE at compile-time. If we're unable to perform the conversion
7212 return NULL_TREE. */
7215 fold_view_convert_expr (tree type
, tree expr
)
7217 /* We support up to 512-bit values (for V8DFmode). */
7218 unsigned char buffer
[64];
7221 /* Check that the host and target are sane. */
7222 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7225 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7229 return native_interpret_expr (type
, buffer
, len
);
7233 /* Fold a unary expression of code CODE and type TYPE with operand
7234 OP0. Return the folded expression if folding is successful.
7235 Otherwise, return NULL_TREE. */
7238 fold_unary (enum tree_code code
, tree type
, tree op0
)
7242 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7244 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7245 && TREE_CODE_LENGTH (code
) == 1);
7250 if (code
== NOP_EXPR
|| code
== CONVERT_EXPR
7251 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
7253 /* Don't use STRIP_NOPS, because signedness of argument type
7255 STRIP_SIGN_NOPS (arg0
);
7259 /* Strip any conversions that don't change the mode. This
7260 is safe for every expression, except for a comparison
7261 expression because its signedness is derived from its
7264 Note that this is done as an internal manipulation within
7265 the constant folder, in order to find the simplest
7266 representation of the arguments so that their form can be
7267 studied. In any cases, the appropriate type conversions
7268 should be put back in the tree that will get out of the
7274 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7276 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7277 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7278 fold_build1 (code
, type
, TREE_OPERAND (arg0
, 1)));
7279 else if (TREE_CODE (arg0
) == COND_EXPR
)
7281 tree arg01
= TREE_OPERAND (arg0
, 1);
7282 tree arg02
= TREE_OPERAND (arg0
, 2);
7283 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7284 arg01
= fold_build1 (code
, type
, arg01
);
7285 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7286 arg02
= fold_build1 (code
, type
, arg02
);
7287 tem
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7290 /* If this was a conversion, and all we did was to move into
7291 inside the COND_EXPR, bring it back out. But leave it if
7292 it is a conversion from integer to integer and the
7293 result precision is no wider than a word since such a
7294 conversion is cheap and may be optimized away by combine,
7295 while it couldn't if it were outside the COND_EXPR. Then return
7296 so we don't get into an infinite recursion loop taking the
7297 conversion out and then back in. */
7299 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
7300 || code
== NON_LVALUE_EXPR
)
7301 && TREE_CODE (tem
) == COND_EXPR
7302 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7303 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7304 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7305 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7306 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7307 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7308 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7310 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7311 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7312 || flag_syntax_only
))
7313 tem
= build1 (code
, type
,
7315 TREE_TYPE (TREE_OPERAND
7316 (TREE_OPERAND (tem
, 1), 0)),
7317 TREE_OPERAND (tem
, 0),
7318 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7319 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
7322 else if (COMPARISON_CLASS_P (arg0
))
7324 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7326 arg0
= copy_node (arg0
);
7327 TREE_TYPE (arg0
) = type
;
7330 else if (TREE_CODE (type
) != INTEGER_TYPE
)
7331 return fold_build3 (COND_EXPR
, type
, arg0
,
7332 fold_build1 (code
, type
,
7334 fold_build1 (code
, type
,
7335 integer_zero_node
));
7344 case FIX_TRUNC_EXPR
:
7345 if (TREE_TYPE (op0
) == type
)
7348 /* If we have (type) (a CMP b) and type is an integral type, return
7349 new expression involving the new type. */
7350 if (COMPARISON_CLASS_P (op0
) && INTEGRAL_TYPE_P (type
))
7351 return fold_build2 (TREE_CODE (op0
), type
, TREE_OPERAND (op0
, 0),
7352 TREE_OPERAND (op0
, 1));
7354 /* Handle cases of two conversions in a row. */
7355 if (TREE_CODE (op0
) == NOP_EXPR
7356 || TREE_CODE (op0
) == CONVERT_EXPR
)
7358 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7359 tree inter_type
= TREE_TYPE (op0
);
7360 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7361 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7362 int inside_float
= FLOAT_TYPE_P (inside_type
);
7363 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7364 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7365 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7366 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7367 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7368 int inter_float
= FLOAT_TYPE_P (inter_type
);
7369 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7370 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7371 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7372 int final_int
= INTEGRAL_TYPE_P (type
);
7373 int final_ptr
= POINTER_TYPE_P (type
);
7374 int final_float
= FLOAT_TYPE_P (type
);
7375 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7376 unsigned int final_prec
= TYPE_PRECISION (type
);
7377 int final_unsignedp
= TYPE_UNSIGNED (type
);
7379 /* In addition to the cases of two conversions in a row
7380 handled below, if we are converting something to its own
7381 type via an object of identical or wider precision, neither
7382 conversion is needed. */
7383 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7384 && (((inter_int
|| inter_ptr
) && final_int
)
7385 || (inter_float
&& final_float
))
7386 && inter_prec
>= final_prec
)
7387 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7389 /* Likewise, if the intermediate and final types are either both
7390 float or both integer, we don't need the middle conversion if
7391 it is wider than the final type and doesn't change the signedness
7392 (for integers). Avoid this if the final type is a pointer
7393 since then we sometimes need the inner conversion. Likewise if
7394 the outer has a precision not equal to the size of its mode. */
7395 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
7396 || (inter_float
&& inside_float
)
7397 || (inter_vec
&& inside_vec
))
7398 && inter_prec
>= inside_prec
7399 && (inter_float
|| inter_vec
7400 || inter_unsignedp
== inside_unsignedp
)
7401 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7402 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7404 && (! final_vec
|| inter_prec
== inside_prec
))
7405 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7407 /* If we have a sign-extension of a zero-extended value, we can
7408 replace that by a single zero-extension. */
7409 if (inside_int
&& inter_int
&& final_int
7410 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7411 && inside_unsignedp
&& !inter_unsignedp
)
7412 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7414 /* Two conversions in a row are not needed unless:
7415 - some conversion is floating-point (overstrict for now), or
7416 - some conversion is a vector (overstrict for now), or
7417 - the intermediate type is narrower than both initial and
7419 - the intermediate type and innermost type differ in signedness,
7420 and the outermost type is wider than the intermediate, or
7421 - the initial type is a pointer type and the precisions of the
7422 intermediate and final types differ, or
7423 - the final type is a pointer type and the precisions of the
7424 initial and intermediate types differ.
7425 - the final type is a pointer type and the initial type not
7426 - the initial type is a pointer to an array and the final type
7428 if (! inside_float
&& ! inter_float
&& ! final_float
7429 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7430 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7431 && ! (inside_int
&& inter_int
7432 && inter_unsignedp
!= inside_unsignedp
7433 && inter_prec
< final_prec
)
7434 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7435 == (final_unsignedp
&& final_prec
> inter_prec
))
7436 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7437 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7438 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7439 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7440 && final_ptr
== inside_ptr
7442 && TREE_CODE (TREE_TYPE (inside_type
)) == ARRAY_TYPE
7443 && TREE_CODE (TREE_TYPE (type
)) != ARRAY_TYPE
))
7444 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
7447 /* Handle (T *)&A.B.C for A being of type T and B and C
7448 living at offset zero. This occurs frequently in
7449 C++ upcasting and then accessing the base. */
7450 if (TREE_CODE (op0
) == ADDR_EXPR
7451 && POINTER_TYPE_P (type
)
7452 && handled_component_p (TREE_OPERAND (op0
, 0)))
7454 HOST_WIDE_INT bitsize
, bitpos
;
7456 enum machine_mode mode
;
7457 int unsignedp
, volatilep
;
7458 tree base
= TREE_OPERAND (op0
, 0);
7459 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7460 &mode
, &unsignedp
, &volatilep
, false);
7461 /* If the reference was to a (constant) zero offset, we can use
7462 the address of the base if it has the same base type
7463 as the result type. */
7464 if (! offset
&& bitpos
== 0
7465 && TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7466 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7467 return fold_convert (type
, build_fold_addr_expr (base
));
7470 if ((TREE_CODE (op0
) == MODIFY_EXPR
7471 || TREE_CODE (op0
) == GIMPLE_MODIFY_STMT
)
7472 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0
, 1))
7473 /* Detect assigning a bitfield. */
7474 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7476 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0
, 0), 1))))
7478 /* Don't leave an assignment inside a conversion
7479 unless assigning a bitfield. */
7480 tem
= fold_build1 (code
, type
, GENERIC_TREE_OPERAND (op0
, 1));
7481 /* First do the assignment, then return converted constant. */
7482 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7483 TREE_NO_WARNING (tem
) = 1;
7484 TREE_USED (tem
) = 1;
7488 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7489 constants (if x has signed type, the sign bit cannot be set
7490 in c). This folds extension into the BIT_AND_EXPR. */
7491 if (INTEGRAL_TYPE_P (type
)
7492 && TREE_CODE (type
) != BOOLEAN_TYPE
7493 && TREE_CODE (op0
) == BIT_AND_EXPR
7494 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7497 tree and0
= TREE_OPERAND (and, 0), and1
= TREE_OPERAND (and, 1);
7500 if (TYPE_UNSIGNED (TREE_TYPE (and))
7501 || (TYPE_PRECISION (type
)
7502 <= TYPE_PRECISION (TREE_TYPE (and))))
7504 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7505 <= HOST_BITS_PER_WIDE_INT
7506 && host_integerp (and1
, 1))
7508 unsigned HOST_WIDE_INT cst
;
7510 cst
= tree_low_cst (and1
, 1);
7511 cst
&= (HOST_WIDE_INT
) -1
7512 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7513 change
= (cst
== 0);
7514 #ifdef LOAD_EXTEND_OP
7516 && !flag_syntax_only
7517 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7520 tree uns
= lang_hooks
.types
.unsigned_type (TREE_TYPE (and0
));
7521 and0
= fold_convert (uns
, and0
);
7522 and1
= fold_convert (uns
, and1
);
7528 tem
= force_fit_type_double (type
, TREE_INT_CST_LOW (and1
),
7529 TREE_INT_CST_HIGH (and1
), 0,
7530 TREE_OVERFLOW (and1
));
7531 return fold_build2 (BIT_AND_EXPR
, type
,
7532 fold_convert (type
, and0
), tem
);
7536 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7537 T2 being pointers to types of the same size. */
7538 if (POINTER_TYPE_P (type
)
7539 && BINARY_CLASS_P (arg0
)
7540 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7541 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
7543 tree arg00
= TREE_OPERAND (arg0
, 0);
7545 tree t1
= TREE_TYPE (arg00
);
7546 tree tt0
= TREE_TYPE (t0
);
7547 tree tt1
= TREE_TYPE (t1
);
7548 tree s0
= TYPE_SIZE (tt0
);
7549 tree s1
= TYPE_SIZE (tt1
);
7551 if (s0
&& s1
&& operand_equal_p (s0
, s1
, OEP_ONLY_CONST
))
7552 return build2 (TREE_CODE (arg0
), t0
, fold_convert (t0
, arg00
),
7553 TREE_OPERAND (arg0
, 1));
7556 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7557 of the same precision, and X is a integer type not narrower than
7558 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7559 if (INTEGRAL_TYPE_P (type
)
7560 && TREE_CODE (op0
) == BIT_NOT_EXPR
7561 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7562 && (TREE_CODE (TREE_OPERAND (op0
, 0)) == NOP_EXPR
7563 || TREE_CODE (TREE_OPERAND (op0
, 0)) == CONVERT_EXPR
)
7564 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7566 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7567 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7568 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7569 return fold_build1 (BIT_NOT_EXPR
, type
, fold_convert (type
, tem
));
7572 tem
= fold_convert_const (code
, type
, arg0
);
7573 return tem
? tem
: NULL_TREE
;
7575 case VIEW_CONVERT_EXPR
:
7576 if (TREE_TYPE (op0
) == type
)
7578 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
7579 return fold_build1 (VIEW_CONVERT_EXPR
, type
, TREE_OPERAND (op0
, 0));
7580 return fold_view_convert_expr (type
, op0
);
7583 tem
= fold_negate_expr (arg0
);
7585 return fold_convert (type
, tem
);
7589 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
7590 return fold_abs_const (arg0
, type
);
7591 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
7592 return fold_build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7593 /* Convert fabs((double)float) into (double)fabsf(float). */
7594 else if (TREE_CODE (arg0
) == NOP_EXPR
7595 && TREE_CODE (type
) == REAL_TYPE
)
7597 tree targ0
= strip_float_extensions (arg0
);
7599 return fold_convert (type
, fold_build1 (ABS_EXPR
,
7603 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7604 else if (tree_expr_nonnegative_p (arg0
) || TREE_CODE (arg0
) == ABS_EXPR
)
7607 /* Strip sign ops from argument. */
7608 if (TREE_CODE (type
) == REAL_TYPE
)
7610 tem
= fold_strip_sign_ops (arg0
);
7612 return fold_build1 (ABS_EXPR
, type
, fold_convert (type
, tem
));
7617 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7618 return fold_convert (type
, arg0
);
7619 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7621 tree itype
= TREE_TYPE (type
);
7622 tree rpart
= fold_convert (itype
, TREE_OPERAND (arg0
, 0));
7623 tree ipart
= fold_convert (itype
, TREE_OPERAND (arg0
, 1));
7624 return fold_build2 (COMPLEX_EXPR
, type
, rpart
, negate_expr (ipart
));
7626 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7628 tree itype
= TREE_TYPE (type
);
7629 tree rpart
= fold_convert (itype
, TREE_REALPART (arg0
));
7630 tree ipart
= fold_convert (itype
, TREE_IMAGPART (arg0
));
7631 return build_complex (type
, rpart
, negate_expr (ipart
));
7633 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7634 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
7638 if (TREE_CODE (arg0
) == INTEGER_CST
)
7639 return fold_not_const (arg0
, type
);
7640 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
7641 return TREE_OPERAND (arg0
, 0);
7642 /* Convert ~ (-A) to A - 1. */
7643 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
7644 return fold_build2 (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7645 build_int_cst (type
, 1));
7646 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7647 else if (INTEGRAL_TYPE_P (type
)
7648 && ((TREE_CODE (arg0
) == MINUS_EXPR
7649 && integer_onep (TREE_OPERAND (arg0
, 1)))
7650 || (TREE_CODE (arg0
) == PLUS_EXPR
7651 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
7652 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
7653 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7654 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7655 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
7657 TREE_OPERAND (arg0
, 0)))))
7658 return fold_build2 (BIT_XOR_EXPR
, type
, tem
,
7659 fold_convert (type
, TREE_OPERAND (arg0
, 1)));
7660 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
7661 && (tem
= fold_unary (BIT_NOT_EXPR
, type
,
7663 TREE_OPERAND (arg0
, 1)))))
7664 return fold_build2 (BIT_XOR_EXPR
, type
,
7665 fold_convert (type
, TREE_OPERAND (arg0
, 0)), tem
);
7669 case TRUTH_NOT_EXPR
:
7670 /* The argument to invert_truthvalue must have Boolean type. */
7671 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
7672 arg0
= fold_convert (boolean_type_node
, arg0
);
7674 /* Note that the operand of this must be an int
7675 and its values must be 0 or 1.
7676 ("true" is a fixed value perhaps depending on the language,
7677 but we don't handle values other than 1 correctly yet.) */
7678 tem
= fold_truth_not_expr (arg0
);
7681 return fold_convert (type
, tem
);
7684 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7685 return fold_convert (type
, arg0
);
7686 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7687 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
7688 TREE_OPERAND (arg0
, 1));
7689 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7690 return fold_convert (type
, TREE_REALPART (arg0
));
7691 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7693 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7694 tem
= fold_build2 (TREE_CODE (arg0
), itype
,
7695 fold_build1 (REALPART_EXPR
, itype
,
7696 TREE_OPERAND (arg0
, 0)),
7697 fold_build1 (REALPART_EXPR
, itype
,
7698 TREE_OPERAND (arg0
, 1)));
7699 return fold_convert (type
, tem
);
7701 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7703 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7704 tem
= fold_build1 (REALPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
7705 return fold_convert (type
, tem
);
7707 if (TREE_CODE (arg0
) == CALL_EXPR
)
7709 tree fn
= get_callee_fndecl (arg0
);
7710 if (DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
7711 switch (DECL_FUNCTION_CODE (fn
))
7713 CASE_FLT_FN (BUILT_IN_CEXPI
):
7714 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
7715 return build_function_call_expr (fn
, TREE_OPERAND (arg0
, 1));
7723 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
7724 return fold_convert (type
, integer_zero_node
);
7725 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
7726 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
7727 TREE_OPERAND (arg0
, 0));
7728 if (TREE_CODE (arg0
) == COMPLEX_CST
)
7729 return fold_convert (type
, TREE_IMAGPART (arg0
));
7730 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7732 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7733 tem
= fold_build2 (TREE_CODE (arg0
), itype
,
7734 fold_build1 (IMAGPART_EXPR
, itype
,
7735 TREE_OPERAND (arg0
, 0)),
7736 fold_build1 (IMAGPART_EXPR
, itype
,
7737 TREE_OPERAND (arg0
, 1)));
7738 return fold_convert (type
, tem
);
7740 if (TREE_CODE (arg0
) == CONJ_EXPR
)
7742 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
7743 tem
= fold_build1 (IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
7744 return fold_convert (type
, negate_expr (tem
));
7746 if (TREE_CODE (arg0
) == CALL_EXPR
)
7748 tree fn
= get_callee_fndecl (arg0
);
7749 if (DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
7750 switch (DECL_FUNCTION_CODE (fn
))
7752 CASE_FLT_FN (BUILT_IN_CEXPI
):
7753 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
7754 return build_function_call_expr (fn
, TREE_OPERAND (arg0
, 1));
7763 } /* switch (code) */
7766 /* Fold a binary expression of code CODE and type TYPE with operands
7767 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7768 Return the folded expression if folding is successful. Otherwise,
7769 return NULL_TREE. */
7772 fold_minmax (enum tree_code code
, tree type
, tree op0
, tree op1
)
7774 enum tree_code compl_code
;
7776 if (code
== MIN_EXPR
)
7777 compl_code
= MAX_EXPR
;
7778 else if (code
== MAX_EXPR
)
7779 compl_code
= MIN_EXPR
;
7783 /* MIN (MAX (a, b), b) == b. */
7784 if (TREE_CODE (op0
) == compl_code
7785 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
7786 return omit_one_operand (type
, op1
, TREE_OPERAND (op0
, 0));
7788 /* MIN (MAX (b, a), b) == b. */
7789 if (TREE_CODE (op0
) == compl_code
7790 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
7791 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
7792 return omit_one_operand (type
, op1
, TREE_OPERAND (op0
, 1));
7794 /* MIN (a, MAX (a, b)) == a. */
7795 if (TREE_CODE (op1
) == compl_code
7796 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
7797 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
7798 return omit_one_operand (type
, op0
, TREE_OPERAND (op1
, 1));
7800 /* MIN (a, MAX (b, a)) == a. */
7801 if (TREE_CODE (op1
) == compl_code
7802 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
7803 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
7804 return omit_one_operand (type
, op0
, TREE_OPERAND (op1
, 0));
7809 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7810 by changing CODE to reduce the magnitude of constants involved in
7811 ARG0 of the comparison.
7812 Returns a canonicalized comparison tree if a simplification was
7813 possible, otherwise returns NULL_TREE. */
7816 maybe_canonicalize_comparison_1 (enum tree_code code
, tree type
,
7817 tree arg0
, tree arg1
)
7819 enum tree_code code0
= TREE_CODE (arg0
);
7820 tree t
, cst0
= NULL_TREE
;
7824 /* Match A +- CST code arg1 and CST code arg1. */
7825 if (!(((code0
== MINUS_EXPR
7826 || code0
== PLUS_EXPR
)
7827 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
7828 || code0
== INTEGER_CST
))
7831 /* Identify the constant in arg0 and its sign. */
7832 if (code0
== INTEGER_CST
)
7835 cst0
= TREE_OPERAND (arg0
, 1);
7836 sgn0
= tree_int_cst_sgn (cst0
);
7838 /* Overflowed constants and zero will cause problems. */
7839 if (integer_zerop (cst0
)
7840 || TREE_OVERFLOW (cst0
))
7843 /* See if we can reduce the magnitude of the constant in
7844 arg0 by changing the comparison code. */
7845 if (code0
== INTEGER_CST
)
7847 /* CST <= arg1 -> CST-1 < arg1. */
7848 if (code
== LE_EXPR
&& sgn0
== 1)
7850 /* -CST < arg1 -> -CST-1 <= arg1. */
7851 else if (code
== LT_EXPR
&& sgn0
== -1)
7853 /* CST > arg1 -> CST-1 >= arg1. */
7854 else if (code
== GT_EXPR
&& sgn0
== 1)
7856 /* -CST >= arg1 -> -CST-1 > arg1. */
7857 else if (code
== GE_EXPR
&& sgn0
== -1)
7861 /* arg1 code' CST' might be more canonical. */
7866 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7868 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
7870 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7871 else if (code
== GT_EXPR
7872 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
7874 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7875 else if (code
== LE_EXPR
7876 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
7878 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7879 else if (code
== GE_EXPR
7880 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
7886 /* Now build the constant reduced in magnitude. */
7887 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
7888 cst0
, build_int_cst (TREE_TYPE (cst0
), 1), 0);
7889 if (code0
!= INTEGER_CST
)
7890 t
= fold_build2 (code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
7892 /* If swapping might yield to a more canonical form, do so. */
7894 return fold_build2 (swap_tree_comparison (code
), type
, arg1
, t
);
7896 return fold_build2 (code
, type
, t
, arg1
);
7899 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7900 overflow further. Try to decrease the magnitude of constants involved
7901 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7902 and put sole constants at the second argument position.
7903 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7906 maybe_canonicalize_comparison (enum tree_code code
, tree type
,
7907 tree arg0
, tree arg1
)
7911 /* In principle pointers also have undefined overflow behavior,
7912 but that causes problems elsewhere. */
7913 if ((flag_wrapv
|| flag_trapv
)
7914 || (TYPE_UNSIGNED (TREE_TYPE (arg0
))
7915 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
7918 /* Try canonicalization by simplifying arg0. */
7919 t
= maybe_canonicalize_comparison_1 (code
, type
, arg0
, arg1
);
7923 /* Try canonicalization by simplifying arg1 using the swapped
7925 code
= swap_tree_comparison (code
);
7926 return maybe_canonicalize_comparison_1 (code
, type
, arg1
, arg0
);
7929 /* Subroutine of fold_binary. This routine performs all of the
7930 transformations that are common to the equality/inequality
7931 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7932 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7933 fold_binary should call fold_binary. Fold a comparison with
7934 tree code CODE and type TYPE with operands OP0 and OP1. Return
7935 the folded comparison or NULL_TREE. */
7938 fold_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
7940 tree arg0
, arg1
, tem
;
7945 STRIP_SIGN_NOPS (arg0
);
7946 STRIP_SIGN_NOPS (arg1
);
7948 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
7949 if (tem
!= NULL_TREE
)
7952 /* If one arg is a real or integer constant, put it last. */
7953 if (tree_swap_operands_p (arg0
, arg1
, true))
7954 return fold_build2 (swap_tree_comparison (code
), type
, op1
, op0
);
7956 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7957 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7958 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7959 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
7960 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
7961 && !(flag_wrapv
|| flag_trapv
))
7962 && (TREE_CODE (arg1
) == INTEGER_CST
7963 && !TREE_OVERFLOW (arg1
)))
7965 tree const1
= TREE_OPERAND (arg0
, 1);
7967 tree variable
= TREE_OPERAND (arg0
, 0);
7970 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
7972 lhs
= fold_build2 (lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
7973 TREE_TYPE (arg1
), const2
, const1
);
7974 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
7975 && (TREE_CODE (lhs
) != INTEGER_CST
7976 || !TREE_OVERFLOW (lhs
)))
7977 return fold_build2 (code
, type
, variable
, lhs
);
7980 /* For comparisons of pointers we can decompose it to a compile time
7981 comparison of the base objects and the offsets into the object.
7982 This requires at least one operand being an ADDR_EXPR to do more
7983 than the operand_equal_p test below. */
7984 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
7985 && (TREE_CODE (arg0
) == ADDR_EXPR
7986 || TREE_CODE (arg1
) == ADDR_EXPR
))
7988 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
7989 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
7990 enum machine_mode mode
;
7991 int volatilep
, unsignedp
;
7992 bool indirect_base0
= false;
7994 /* Get base and offset for the access. Strip ADDR_EXPR for
7995 get_inner_reference, but put it back by stripping INDIRECT_REF
7996 off the base object if possible. */
7998 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8000 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8001 &bitsize
, &bitpos0
, &offset0
, &mode
,
8002 &unsignedp
, &volatilep
, false);
8003 if (TREE_CODE (base0
) == INDIRECT_REF
)
8004 base0
= TREE_OPERAND (base0
, 0);
8006 indirect_base0
= true;
8010 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8012 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8013 &bitsize
, &bitpos1
, &offset1
, &mode
,
8014 &unsignedp
, &volatilep
, false);
8015 /* We have to make sure to have an indirect/non-indirect base1
8016 just the same as we did for base0. */
8017 if (TREE_CODE (base1
) == INDIRECT_REF
8019 base1
= TREE_OPERAND (base1
, 0);
8020 else if (!indirect_base0
)
8023 else if (indirect_base0
)
8026 /* If we have equivalent bases we might be able to simplify. */
8028 && operand_equal_p (base0
, base1
, 0))
8030 /* We can fold this expression to a constant if the non-constant
8031 offset parts are equal. */
8032 if (offset0
== offset1
8033 || (offset0
&& offset1
8034 && operand_equal_p (offset0
, offset1
, 0)))
8039 return build_int_cst (boolean_type_node
, bitpos0
== bitpos1
);
8041 return build_int_cst (boolean_type_node
, bitpos0
!= bitpos1
);
8043 return build_int_cst (boolean_type_node
, bitpos0
< bitpos1
);
8045 return build_int_cst (boolean_type_node
, bitpos0
<= bitpos1
);
8047 return build_int_cst (boolean_type_node
, bitpos0
>= bitpos1
);
8049 return build_int_cst (boolean_type_node
, bitpos0
> bitpos1
);
8053 /* We can simplify the comparison to a comparison of the variable
8054 offset parts if the constant offset parts are equal.
8055 Be careful to use signed size type here because otherwise we
8056 mess with array offsets in the wrong way. This is possible
8057 because pointer arithmetic is restricted to retain within an
8058 object and overflow on pointer differences is undefined as of
8059 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8060 else if (bitpos0
== bitpos1
)
8062 tree signed_size_type_node
;
8063 signed_size_type_node
= signed_type_for (size_type_node
);
8065 /* By converting to signed size type we cover middle-end pointer
8066 arithmetic which operates on unsigned pointer types of size
8067 type size and ARRAY_REF offsets which are properly sign or
8068 zero extended from their type in case it is narrower than
8070 if (offset0
== NULL_TREE
)
8071 offset0
= build_int_cst (signed_size_type_node
, 0);
8073 offset0
= fold_convert (signed_size_type_node
, offset0
);
8074 if (offset1
== NULL_TREE
)
8075 offset1
= build_int_cst (signed_size_type_node
, 0);
8077 offset1
= fold_convert (signed_size_type_node
, offset1
);
8079 return fold_build2 (code
, type
, offset0
, offset1
);
8084 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8085 same object, then we can fold this to a comparison of the two offsets in
8086 signed size type. This is possible because pointer arithmetic is
8087 restricted to retain within an object and overflow on pointer differences
8088 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8089 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8090 && !flag_wrapv
&& !flag_trapv
)
8092 tree base0
, offset0
, base1
, offset1
;
8094 if (extract_array_ref (arg0
, &base0
, &offset0
)
8095 && extract_array_ref (arg1
, &base1
, &offset1
)
8096 && operand_equal_p (base0
, base1
, 0))
8098 tree signed_size_type_node
;
8099 signed_size_type_node
= signed_type_for (size_type_node
);
8101 /* By converting to signed size type we cover middle-end pointer
8102 arithmetic which operates on unsigned pointer types of size
8103 type size and ARRAY_REF offsets which are properly sign or
8104 zero extended from their type in case it is narrower than
8106 if (offset0
== NULL_TREE
)
8107 offset0
= build_int_cst (signed_size_type_node
, 0);
8109 offset0
= fold_convert (signed_size_type_node
, offset0
);
8110 if (offset1
== NULL_TREE
)
8111 offset1
= build_int_cst (signed_size_type_node
, 0);
8113 offset1
= fold_convert (signed_size_type_node
, offset1
);
8115 return fold_build2 (code
, type
, offset0
, offset1
);
8119 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8120 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8121 the resulting offset is smaller in absolute value than the
8123 if (!(flag_wrapv
|| flag_trapv
)
8124 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
8125 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8126 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8127 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8128 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8129 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8130 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8132 tree const1
= TREE_OPERAND (arg0
, 1);
8133 tree const2
= TREE_OPERAND (arg1
, 1);
8134 tree variable1
= TREE_OPERAND (arg0
, 0);
8135 tree variable2
= TREE_OPERAND (arg1
, 0);
8138 /* Put the constant on the side where it doesn't overflow and is
8139 of lower absolute value than before. */
8140 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8141 ? MINUS_EXPR
: PLUS_EXPR
,
8143 if (!TREE_OVERFLOW (cst
)
8144 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
8145 return fold_build2 (code
, type
,
8147 fold_build2 (TREE_CODE (arg1
), TREE_TYPE (arg1
),
8150 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8151 ? MINUS_EXPR
: PLUS_EXPR
,
8153 if (!TREE_OVERFLOW (cst
)
8154 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
8155 return fold_build2 (code
, type
,
8156 fold_build2 (TREE_CODE (arg0
), TREE_TYPE (arg0
),
8161 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8162 signed arithmetic case. That form is created by the compiler
8163 often enough for folding it to be of value. One example is in
8164 computing loop trip counts after Operator Strength Reduction. */
8165 if (!(flag_wrapv
|| flag_trapv
)
8166 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
8167 && TREE_CODE (arg0
) == MULT_EXPR
8168 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8169 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8170 && integer_zerop (arg1
))
8172 tree const1
= TREE_OPERAND (arg0
, 1);
8173 tree const2
= arg1
; /* zero */
8174 tree variable1
= TREE_OPERAND (arg0
, 0);
8175 enum tree_code cmp_code
= code
;
8177 gcc_assert (!integer_zerop (const1
));
8179 /* If const1 is negative we swap the sense of the comparison. */
8180 if (tree_int_cst_sgn (const1
) < 0)
8181 cmp_code
= swap_tree_comparison (cmp_code
);
8183 return fold_build2 (cmp_code
, type
, variable1
, const2
);
8186 tem
= maybe_canonicalize_comparison (code
, type
, arg0
, arg1
);
8190 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
8192 tree targ0
= strip_float_extensions (arg0
);
8193 tree targ1
= strip_float_extensions (arg1
);
8194 tree newtype
= TREE_TYPE (targ0
);
8196 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
8197 newtype
= TREE_TYPE (targ1
);
8199 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8200 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
8201 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
8202 fold_convert (newtype
, targ1
));
8204 /* (-a) CMP (-b) -> b CMP a */
8205 if (TREE_CODE (arg0
) == NEGATE_EXPR
8206 && TREE_CODE (arg1
) == NEGATE_EXPR
)
8207 return fold_build2 (code
, type
, TREE_OPERAND (arg1
, 0),
8208 TREE_OPERAND (arg0
, 0));
8210 if (TREE_CODE (arg1
) == REAL_CST
)
8212 REAL_VALUE_TYPE cst
;
8213 cst
= TREE_REAL_CST (arg1
);
8215 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8216 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8217 return fold_build2 (swap_tree_comparison (code
), type
,
8218 TREE_OPERAND (arg0
, 0),
8219 build_real (TREE_TYPE (arg1
),
8220 REAL_VALUE_NEGATE (cst
)));
8222 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8223 /* a CMP (-0) -> a CMP 0 */
8224 if (REAL_VALUE_MINUS_ZERO (cst
))
8225 return fold_build2 (code
, type
, arg0
,
8226 build_real (TREE_TYPE (arg1
), dconst0
));
8228 /* x != NaN is always true, other ops are always false. */
8229 if (REAL_VALUE_ISNAN (cst
)
8230 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
8232 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
8233 return omit_one_operand (type
, tem
, arg0
);
8236 /* Fold comparisons against infinity. */
8237 if (REAL_VALUE_ISINF (cst
))
8239 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
8240 if (tem
!= NULL_TREE
)
8245 /* If this is a comparison of a real constant with a PLUS_EXPR
8246 or a MINUS_EXPR of a real constant, we can convert it into a
8247 comparison with a revised real constant as long as no overflow
8248 occurs when unsafe_math_optimizations are enabled. */
8249 if (flag_unsafe_math_optimizations
8250 && TREE_CODE (arg1
) == REAL_CST
8251 && (TREE_CODE (arg0
) == PLUS_EXPR
8252 || TREE_CODE (arg0
) == MINUS_EXPR
)
8253 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
8254 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
8255 ? MINUS_EXPR
: PLUS_EXPR
,
8256 arg1
, TREE_OPERAND (arg0
, 1), 0))
8257 && !TREE_OVERFLOW (tem
))
8258 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8260 /* Likewise, we can simplify a comparison of a real constant with
8261 a MINUS_EXPR whose first operand is also a real constant, i.e.
8262 (c1 - x) < c2 becomes x > c1-c2. */
8263 if (flag_unsafe_math_optimizations
8264 && TREE_CODE (arg1
) == REAL_CST
8265 && TREE_CODE (arg0
) == MINUS_EXPR
8266 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
8267 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
8269 && !TREE_OVERFLOW (tem
))
8270 return fold_build2 (swap_tree_comparison (code
), type
,
8271 TREE_OPERAND (arg0
, 1), tem
);
8273 /* Fold comparisons against built-in math functions. */
8274 if (TREE_CODE (arg1
) == REAL_CST
8275 && flag_unsafe_math_optimizations
8276 && ! flag_errno_math
)
8278 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8280 if (fcode
!= END_BUILTINS
)
8282 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
8283 if (tem
!= NULL_TREE
)
8289 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8290 if (TREE_CONSTANT (arg1
)
8291 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
8292 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
8293 /* This optimization is invalid for ordered comparisons
8294 if CONST+INCR overflows or if foo+incr might overflow.
8295 This optimization is invalid for floating point due to rounding.
8296 For pointer types we assume overflow doesn't happen. */
8297 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
8298 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8299 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
8301 tree varop
, newconst
;
8303 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
8305 newconst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
8306 arg1
, TREE_OPERAND (arg0
, 1));
8307 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
8308 TREE_OPERAND (arg0
, 0),
8309 TREE_OPERAND (arg0
, 1));
8313 newconst
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
8314 arg1
, TREE_OPERAND (arg0
, 1));
8315 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
8316 TREE_OPERAND (arg0
, 0),
8317 TREE_OPERAND (arg0
, 1));
8321 /* If VAROP is a reference to a bitfield, we must mask
8322 the constant by the width of the field. */
8323 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
8324 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
8325 && host_integerp (DECL_SIZE (TREE_OPERAND
8326 (TREE_OPERAND (varop
, 0), 1)), 1))
8328 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
8329 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
8330 tree folded_compare
, shift
;
8332 /* First check whether the comparison would come out
8333 always the same. If we don't do that we would
8334 change the meaning with the masking. */
8335 folded_compare
= fold_build2 (code
, type
,
8336 TREE_OPERAND (varop
, 0), arg1
);
8337 if (TREE_CODE (folded_compare
) == INTEGER_CST
)
8338 return omit_one_operand (type
, folded_compare
, varop
);
8340 shift
= build_int_cst (NULL_TREE
,
8341 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
8342 shift
= fold_convert (TREE_TYPE (varop
), shift
);
8343 newconst
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
8345 newconst
= fold_build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
8349 return fold_build2 (code
, type
, varop
, newconst
);
8352 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
8353 && (TREE_CODE (arg0
) == NOP_EXPR
8354 || TREE_CODE (arg0
) == CONVERT_EXPR
))
8356 /* If we are widening one operand of an integer comparison,
8357 see if the other operand is similarly being widened. Perhaps we
8358 can do the comparison in the narrower type. */
8359 tem
= fold_widened_comparison (code
, type
, arg0
, arg1
);
8363 /* Or if we are changing signedness. */
8364 tem
= fold_sign_changed_comparison (code
, type
, arg0
, arg1
);
8369 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8370 constant, we can simplify it. */
8371 if (TREE_CODE (arg1
) == INTEGER_CST
8372 && (TREE_CODE (arg0
) == MIN_EXPR
8373 || TREE_CODE (arg0
) == MAX_EXPR
)
8374 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8376 tem
= optimize_minmax_comparison (code
, type
, op0
, op1
);
8381 /* Simplify comparison of something with itself. (For IEEE
8382 floating-point, we can only do some of these simplifications.) */
8383 if (operand_equal_p (arg0
, arg1
, 0))
8388 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8389 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8390 return constant_boolean_node (1, type
);
8395 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8396 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8397 return constant_boolean_node (1, type
);
8398 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
8401 /* For NE, we can only do this simplification if integer
8402 or we don't honor IEEE floating point NaNs. */
8403 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
8404 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8406 /* ... fall through ... */
8409 return constant_boolean_node (0, type
);
8415 /* If we are comparing an expression that just has comparisons
8416 of two integer values, arithmetic expressions of those comparisons,
8417 and constants, we can simplify it. There are only three cases
8418 to check: the two values can either be equal, the first can be
8419 greater, or the second can be greater. Fold the expression for
8420 those three values. Since each value must be 0 or 1, we have
8421 eight possibilities, each of which corresponds to the constant 0
8422 or 1 or one of the six possible comparisons.
8424 This handles common cases like (a > b) == 0 but also handles
8425 expressions like ((x > y) - (y > x)) > 0, which supposedly
8426 occur in macroized code. */
8428 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8430 tree cval1
= 0, cval2
= 0;
8433 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8434 /* Don't handle degenerate cases here; they should already
8435 have been handled anyway. */
8436 && cval1
!= 0 && cval2
!= 0
8437 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8438 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8439 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8440 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8441 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8442 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8443 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8445 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8446 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8448 /* We can't just pass T to eval_subst in case cval1 or cval2
8449 was the same as ARG1. */
8452 = fold_build2 (code
, type
,
8453 eval_subst (arg0
, cval1
, maxval
,
8457 = fold_build2 (code
, type
,
8458 eval_subst (arg0
, cval1
, maxval
,
8462 = fold_build2 (code
, type
,
8463 eval_subst (arg0
, cval1
, minval
,
8467 /* All three of these results should be 0 or 1. Confirm they are.
8468 Then use those values to select the proper code to use. */
8470 if (TREE_CODE (high_result
) == INTEGER_CST
8471 && TREE_CODE (equal_result
) == INTEGER_CST
8472 && TREE_CODE (low_result
) == INTEGER_CST
)
8474 /* Make a 3-bit mask with the high-order bit being the
8475 value for `>', the next for '=', and the low for '<'. */
8476 switch ((integer_onep (high_result
) * 4)
8477 + (integer_onep (equal_result
) * 2)
8478 + integer_onep (low_result
))
8482 return omit_one_operand (type
, integer_zero_node
, arg0
);
8503 return omit_one_operand (type
, integer_one_node
, arg0
);
8507 return save_expr (build2 (code
, type
, cval1
, cval2
));
8508 return fold_build2 (code
, type
, cval1
, cval2
);
8513 /* Fold a comparison of the address of COMPONENT_REFs with the same
8514 type and component to a comparison of the address of the base
8515 object. In short, &x->a OP &y->a to x OP y and
8516 &x->a OP &y.a to x OP &y */
8517 if (TREE_CODE (arg0
) == ADDR_EXPR
8518 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == COMPONENT_REF
8519 && TREE_CODE (arg1
) == ADDR_EXPR
8520 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == COMPONENT_REF
)
8522 tree cref0
= TREE_OPERAND (arg0
, 0);
8523 tree cref1
= TREE_OPERAND (arg1
, 0);
8524 if (TREE_OPERAND (cref0
, 1) == TREE_OPERAND (cref1
, 1))
8526 tree op0
= TREE_OPERAND (cref0
, 0);
8527 tree op1
= TREE_OPERAND (cref1
, 0);
8528 return fold_build2 (code
, type
,
8529 build_fold_addr_expr (op0
),
8530 build_fold_addr_expr (op1
));
8534 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8535 into a single range test. */
8536 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8537 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
8538 && TREE_CODE (arg1
) == INTEGER_CST
8539 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8540 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8541 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8542 && !TREE_OVERFLOW (arg1
))
8544 tem
= fold_div_compare (code
, type
, arg0
, arg1
);
8545 if (tem
!= NULL_TREE
)
8549 /* Fold ~X op ~Y as Y op X. */
8550 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8551 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
8552 return fold_build2 (code
, type
,
8553 TREE_OPERAND (arg1
, 0),
8554 TREE_OPERAND (arg0
, 0));
8556 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8557 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8558 && TREE_CODE (arg1
) == INTEGER_CST
)
8559 return fold_build2 (swap_tree_comparison (code
), type
,
8560 TREE_OPERAND (arg0
, 0),
8561 fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
));
8567 /* Subroutine of fold_binary. Optimize complex multiplications of the
8568 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8569 argument EXPR represents the expression "z" of type TYPE. */
8572 fold_mult_zconjz (tree type
, tree expr
)
8574 tree itype
= TREE_TYPE (type
);
8575 tree rpart
, ipart
, tem
;
8577 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
8579 rpart
= TREE_OPERAND (expr
, 0);
8580 ipart
= TREE_OPERAND (expr
, 1);
8582 else if (TREE_CODE (expr
) == COMPLEX_CST
)
8584 rpart
= TREE_REALPART (expr
);
8585 ipart
= TREE_IMAGPART (expr
);
8589 expr
= save_expr (expr
);
8590 rpart
= fold_build1 (REALPART_EXPR
, itype
, expr
);
8591 ipart
= fold_build1 (IMAGPART_EXPR
, itype
, expr
);
8594 rpart
= save_expr (rpart
);
8595 ipart
= save_expr (ipart
);
8596 tem
= fold_build2 (PLUS_EXPR
, itype
,
8597 fold_build2 (MULT_EXPR
, itype
, rpart
, rpart
),
8598 fold_build2 (MULT_EXPR
, itype
, ipart
, ipart
));
8599 return fold_build2 (COMPLEX_EXPR
, type
, tem
,
8600 fold_convert (itype
, integer_zero_node
));
8604 /* Fold a binary expression of code CODE and type TYPE with operands
8605 OP0 and OP1. Return the folded expression if folding is
8606 successful. Otherwise, return NULL_TREE. */
8609 fold_binary (enum tree_code code
, tree type
, tree op0
, tree op1
)
8611 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
8612 tree arg0
, arg1
, tem
;
8613 tree t1
= NULL_TREE
;
8615 gcc_assert ((IS_EXPR_CODE_CLASS (kind
)
8616 || IS_GIMPLE_STMT_CODE_CLASS (kind
))
8617 && TREE_CODE_LENGTH (code
) == 2
8619 && op1
!= NULL_TREE
);
8624 /* Strip any conversions that don't change the mode. This is
8625 safe for every expression, except for a comparison expression
8626 because its signedness is derived from its operands. So, in
8627 the latter case, only strip conversions that don't change the
8630 Note that this is done as an internal manipulation within the
8631 constant folder, in order to find the simplest representation
8632 of the arguments so that their form can be studied. In any
8633 cases, the appropriate type conversions should be put back in
8634 the tree that will get out of the constant folder. */
8636 if (kind
== tcc_comparison
)
8638 STRIP_SIGN_NOPS (arg0
);
8639 STRIP_SIGN_NOPS (arg1
);
8647 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8648 constant but we can't do arithmetic on them. */
8649 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
8650 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
8651 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
8652 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
8654 if (kind
== tcc_binary
)
8655 tem
= const_binop (code
, arg0
, arg1
, 0);
8656 else if (kind
== tcc_comparison
)
8657 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8661 if (tem
!= NULL_TREE
)
8663 if (TREE_TYPE (tem
) != type
)
8664 tem
= fold_convert (type
, tem
);
8669 /* If this is a commutative operation, and ARG0 is a constant, move it
8670 to ARG1 to reduce the number of tests below. */
8671 if (commutative_tree_code (code
)
8672 && tree_swap_operands_p (arg0
, arg1
, true))
8673 return fold_build2 (code
, type
, op1
, op0
);
8675 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8677 First check for cases where an arithmetic operation is applied to a
8678 compound, conditional, or comparison operation. Push the arithmetic
8679 operation inside the compound or conditional to see if any folding
8680 can then be done. Convert comparison to conditional for this purpose.
8681 The also optimizes non-constant cases that used to be done in
8684 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8685 one of the operands is a comparison and the other is a comparison, a
8686 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8687 code below would make the expression more complex. Change it to a
8688 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8689 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8691 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
8692 || code
== EQ_EXPR
|| code
== NE_EXPR
)
8693 && ((truth_value_p (TREE_CODE (arg0
))
8694 && (truth_value_p (TREE_CODE (arg1
))
8695 || (TREE_CODE (arg1
) == BIT_AND_EXPR
8696 && integer_onep (TREE_OPERAND (arg1
, 1)))))
8697 || (truth_value_p (TREE_CODE (arg1
))
8698 && (truth_value_p (TREE_CODE (arg0
))
8699 || (TREE_CODE (arg0
) == BIT_AND_EXPR
8700 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
8702 tem
= fold_build2 (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
8703 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
8706 fold_convert (boolean_type_node
, arg0
),
8707 fold_convert (boolean_type_node
, arg1
));
8709 if (code
== EQ_EXPR
)
8710 tem
= invert_truthvalue (tem
);
8712 return fold_convert (type
, tem
);
8715 if (TREE_CODE_CLASS (code
) == tcc_binary
8716 || TREE_CODE_CLASS (code
) == tcc_comparison
)
8718 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
8719 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8720 fold_build2 (code
, type
,
8721 TREE_OPERAND (arg0
, 1), op1
));
8722 if (TREE_CODE (arg1
) == COMPOUND_EXPR
8723 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
8724 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
8725 fold_build2 (code
, type
,
8726 op0
, TREE_OPERAND (arg1
, 1)));
8728 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
8730 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
8732 /*cond_first_p=*/1);
8733 if (tem
!= NULL_TREE
)
8737 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
8739 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
8741 /*cond_first_p=*/0);
8742 if (tem
!= NULL_TREE
)
8750 /* A + (-B) -> A - B */
8751 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
8752 return fold_build2 (MINUS_EXPR
, type
,
8753 fold_convert (type
, arg0
),
8754 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8755 /* (-A) + B -> B - A */
8756 if (TREE_CODE (arg0
) == NEGATE_EXPR
8757 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
8758 return fold_build2 (MINUS_EXPR
, type
,
8759 fold_convert (type
, arg1
),
8760 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
8761 /* Convert ~A + 1 to -A. */
8762 if (INTEGRAL_TYPE_P (type
)
8763 && TREE_CODE (arg0
) == BIT_NOT_EXPR
8764 && integer_onep (arg1
))
8765 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
8767 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8769 if ((TREE_CODE (arg0
) == MULT_EXPR
8770 || TREE_CODE (arg1
) == MULT_EXPR
)
8771 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
8773 tree tem
= fold_plusminus_mult_expr (code
, type
, arg0
, arg1
);
8778 if (! FLOAT_TYPE_P (type
))
8780 if (integer_zerop (arg1
))
8781 return non_lvalue (fold_convert (type
, arg0
));
8784 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
8785 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
8786 && !TYPE_TRAP_SIGNED (type
))
8788 t1
= build_int_cst_type (type
, -1);
8789 return omit_one_operand (type
, t1
, arg1
);
8793 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
8794 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
8795 && !TYPE_TRAP_SIGNED (type
))
8797 t1
= build_int_cst_type (type
, -1);
8798 return omit_one_operand (type
, t1
, arg0
);
8801 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8802 with a constant, and the two constants have no bits in common,
8803 we should treat this as a BIT_IOR_EXPR since this may produce more
8805 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8806 && TREE_CODE (arg1
) == BIT_AND_EXPR
8807 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8808 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8809 && integer_zerop (const_binop (BIT_AND_EXPR
,
8810 TREE_OPERAND (arg0
, 1),
8811 TREE_OPERAND (arg1
, 1), 0)))
8813 code
= BIT_IOR_EXPR
;
8817 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8818 (plus (plus (mult) (mult)) (foo)) so that we can
8819 take advantage of the factoring cases below. */
8820 if (((TREE_CODE (arg0
) == PLUS_EXPR
8821 || TREE_CODE (arg0
) == MINUS_EXPR
)
8822 && TREE_CODE (arg1
) == MULT_EXPR
)
8823 || ((TREE_CODE (arg1
) == PLUS_EXPR
8824 || TREE_CODE (arg1
) == MINUS_EXPR
)
8825 && TREE_CODE (arg0
) == MULT_EXPR
))
8827 tree parg0
, parg1
, parg
, marg
;
8828 enum tree_code pcode
;
8830 if (TREE_CODE (arg1
) == MULT_EXPR
)
8831 parg
= arg0
, marg
= arg1
;
8833 parg
= arg1
, marg
= arg0
;
8834 pcode
= TREE_CODE (parg
);
8835 parg0
= TREE_OPERAND (parg
, 0);
8836 parg1
= TREE_OPERAND (parg
, 1);
8840 if (TREE_CODE (parg0
) == MULT_EXPR
8841 && TREE_CODE (parg1
) != MULT_EXPR
)
8842 return fold_build2 (pcode
, type
,
8843 fold_build2 (PLUS_EXPR
, type
,
8844 fold_convert (type
, parg0
),
8845 fold_convert (type
, marg
)),
8846 fold_convert (type
, parg1
));
8847 if (TREE_CODE (parg0
) != MULT_EXPR
8848 && TREE_CODE (parg1
) == MULT_EXPR
)
8849 return fold_build2 (PLUS_EXPR
, type
,
8850 fold_convert (type
, parg0
),
8851 fold_build2 (pcode
, type
,
8852 fold_convert (type
, marg
),
8857 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8858 of the array. Loop optimizer sometimes produce this type of
8860 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8862 tem
= try_move_mult_to_index (PLUS_EXPR
, arg0
, arg1
);
8864 return fold_convert (type
, tem
);
8866 else if (TREE_CODE (arg1
) == ADDR_EXPR
)
8868 tem
= try_move_mult_to_index (PLUS_EXPR
, arg1
, arg0
);
8870 return fold_convert (type
, tem
);
8875 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8876 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
8877 return non_lvalue (fold_convert (type
, arg0
));
8879 /* Likewise if the operands are reversed. */
8880 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
8881 return non_lvalue (fold_convert (type
, arg1
));
8883 /* Convert X + -C into X - C. */
8884 if (TREE_CODE (arg1
) == REAL_CST
8885 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
8887 tem
= fold_negate_const (arg1
, type
);
8888 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
8889 return fold_build2 (MINUS_EXPR
, type
,
8890 fold_convert (type
, arg0
),
8891 fold_convert (type
, tem
));
8894 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8895 to __complex__ ( x, y ). This is not the same for SNaNs or
8896 if singed zeros are involved. */
8897 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8898 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
8899 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
8901 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
8902 tree arg0r
= fold_unary (REALPART_EXPR
, rtype
, arg0
);
8903 tree arg0i
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
8904 bool arg0rz
= false, arg0iz
= false;
8905 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
8906 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
8908 tree arg1r
= fold_unary (REALPART_EXPR
, rtype
, arg1
);
8909 tree arg1i
= fold_unary (IMAGPART_EXPR
, rtype
, arg1
);
8910 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
8912 tree rp
= arg1r
? arg1r
8913 : build1 (REALPART_EXPR
, rtype
, arg1
);
8914 tree ip
= arg0i
? arg0i
8915 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
8916 return fold_build2 (COMPLEX_EXPR
, type
, rp
, ip
);
8918 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
8920 tree rp
= arg0r
? arg0r
8921 : build1 (REALPART_EXPR
, rtype
, arg0
);
8922 tree ip
= arg1i
? arg1i
8923 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
8924 return fold_build2 (COMPLEX_EXPR
, type
, rp
, ip
);
8929 if (flag_unsafe_math_optimizations
8930 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
8931 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
8932 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
8935 /* Convert x+x into x*2.0. */
8936 if (operand_equal_p (arg0
, arg1
, 0)
8937 && SCALAR_FLOAT_TYPE_P (type
))
8938 return fold_build2 (MULT_EXPR
, type
, arg0
,
8939 build_real (type
, dconst2
));
8941 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8942 if (flag_unsafe_math_optimizations
8943 && TREE_CODE (arg1
) == PLUS_EXPR
8944 && TREE_CODE (arg0
) != MULT_EXPR
)
8946 tree tree10
= TREE_OPERAND (arg1
, 0);
8947 tree tree11
= TREE_OPERAND (arg1
, 1);
8948 if (TREE_CODE (tree11
) == MULT_EXPR
8949 && TREE_CODE (tree10
) == MULT_EXPR
)
8952 tree0
= fold_build2 (PLUS_EXPR
, type
, arg0
, tree10
);
8953 return fold_build2 (PLUS_EXPR
, type
, tree0
, tree11
);
8956 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8957 if (flag_unsafe_math_optimizations
8958 && TREE_CODE (arg0
) == PLUS_EXPR
8959 && TREE_CODE (arg1
) != MULT_EXPR
)
8961 tree tree00
= TREE_OPERAND (arg0
, 0);
8962 tree tree01
= TREE_OPERAND (arg0
, 1);
8963 if (TREE_CODE (tree01
) == MULT_EXPR
8964 && TREE_CODE (tree00
) == MULT_EXPR
)
8967 tree0
= fold_build2 (PLUS_EXPR
, type
, tree01
, arg1
);
8968 return fold_build2 (PLUS_EXPR
, type
, tree00
, tree0
);
8974 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8975 is a rotate of A by C1 bits. */
8976 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8977 is a rotate of A by B bits. */
8979 enum tree_code code0
, code1
;
8980 code0
= TREE_CODE (arg0
);
8981 code1
= TREE_CODE (arg1
);
8982 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
8983 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
8984 && operand_equal_p (TREE_OPERAND (arg0
, 0),
8985 TREE_OPERAND (arg1
, 0), 0)
8986 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
8988 tree tree01
, tree11
;
8989 enum tree_code code01
, code11
;
8991 tree01
= TREE_OPERAND (arg0
, 1);
8992 tree11
= TREE_OPERAND (arg1
, 1);
8993 STRIP_NOPS (tree01
);
8994 STRIP_NOPS (tree11
);
8995 code01
= TREE_CODE (tree01
);
8996 code11
= TREE_CODE (tree11
);
8997 if (code01
== INTEGER_CST
8998 && code11
== INTEGER_CST
8999 && TREE_INT_CST_HIGH (tree01
) == 0
9000 && TREE_INT_CST_HIGH (tree11
) == 0
9001 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
9002 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9003 return build2 (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9004 code0
== LSHIFT_EXPR
? tree01
: tree11
);
9005 else if (code11
== MINUS_EXPR
)
9007 tree tree110
, tree111
;
9008 tree110
= TREE_OPERAND (tree11
, 0);
9009 tree111
= TREE_OPERAND (tree11
, 1);
9010 STRIP_NOPS (tree110
);
9011 STRIP_NOPS (tree111
);
9012 if (TREE_CODE (tree110
) == INTEGER_CST
9013 && 0 == compare_tree_int (tree110
,
9015 (TREE_TYPE (TREE_OPERAND
9017 && operand_equal_p (tree01
, tree111
, 0))
9018 return build2 ((code0
== LSHIFT_EXPR
9021 type
, TREE_OPERAND (arg0
, 0), tree01
);
9023 else if (code01
== MINUS_EXPR
)
9025 tree tree010
, tree011
;
9026 tree010
= TREE_OPERAND (tree01
, 0);
9027 tree011
= TREE_OPERAND (tree01
, 1);
9028 STRIP_NOPS (tree010
);
9029 STRIP_NOPS (tree011
);
9030 if (TREE_CODE (tree010
) == INTEGER_CST
9031 && 0 == compare_tree_int (tree010
,
9033 (TREE_TYPE (TREE_OPERAND
9035 && operand_equal_p (tree11
, tree011
, 0))
9036 return build2 ((code0
!= LSHIFT_EXPR
9039 type
, TREE_OPERAND (arg0
, 0), tree11
);
9045 /* In most languages, can't associate operations on floats through
9046 parentheses. Rather than remember where the parentheses were, we
9047 don't associate floats at all, unless the user has specified
9048 -funsafe-math-optimizations. */
9050 if (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
9052 tree var0
, con0
, lit0
, minus_lit0
;
9053 tree var1
, con1
, lit1
, minus_lit1
;
9055 /* Split both trees into variables, constants, and literals. Then
9056 associate each group together, the constants with literals,
9057 then the result with variables. This increases the chances of
9058 literals being recombined later and of generating relocatable
9059 expressions for the sum of a constant and literal. */
9060 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
9061 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
9062 code
== MINUS_EXPR
);
9064 /* Only do something if we found more than two objects. Otherwise,
9065 nothing has changed and we risk infinite recursion. */
9066 if (2 < ((var0
!= 0) + (var1
!= 0)
9067 + (con0
!= 0) + (con1
!= 0)
9068 + (lit0
!= 0) + (lit1
!= 0)
9069 + (minus_lit0
!= 0) + (minus_lit1
!= 0)))
9071 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9072 if (code
== MINUS_EXPR
)
9075 var0
= associate_trees (var0
, var1
, code
, type
);
9076 con0
= associate_trees (con0
, con1
, code
, type
);
9077 lit0
= associate_trees (lit0
, lit1
, code
, type
);
9078 minus_lit0
= associate_trees (minus_lit0
, minus_lit1
, code
, type
);
9080 /* Preserve the MINUS_EXPR if the negative part of the literal is
9081 greater than the positive part. Otherwise, the multiplicative
9082 folding code (i.e extract_muldiv) may be fooled in case
9083 unsigned constants are subtracted, like in the following
9084 example: ((X*2 + 4) - 8U)/2. */
9085 if (minus_lit0
&& lit0
)
9087 if (TREE_CODE (lit0
) == INTEGER_CST
9088 && TREE_CODE (minus_lit0
) == INTEGER_CST
9089 && tree_int_cst_lt (lit0
, minus_lit0
))
9091 minus_lit0
= associate_trees (minus_lit0
, lit0
,
9097 lit0
= associate_trees (lit0
, minus_lit0
,
9105 return fold_convert (type
,
9106 associate_trees (var0
, minus_lit0
,
9110 con0
= associate_trees (con0
, minus_lit0
,
9112 return fold_convert (type
,
9113 associate_trees (var0
, con0
,
9118 con0
= associate_trees (con0
, lit0
, code
, type
);
9119 return fold_convert (type
, associate_trees (var0
, con0
,
9127 /* A - (-B) -> A + B */
9128 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
9129 return fold_build2 (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0));
9130 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9131 if (TREE_CODE (arg0
) == NEGATE_EXPR
9132 && (FLOAT_TYPE_P (type
)
9133 || INTEGRAL_TYPE_P (type
))
9134 && negate_expr_p (arg1
)
9135 && reorder_operands_p (arg0
, arg1
))
9136 return fold_build2 (MINUS_EXPR
, type
, negate_expr (arg1
),
9137 TREE_OPERAND (arg0
, 0));
9138 /* Convert -A - 1 to ~A. */
9139 if (INTEGRAL_TYPE_P (type
)
9140 && TREE_CODE (arg0
) == NEGATE_EXPR
9141 && integer_onep (arg1
)
9142 && !TYPE_TRAP_SIGNED (type
))
9143 return fold_build1 (BIT_NOT_EXPR
, type
,
9144 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
9146 /* Convert -1 - A to ~A. */
9147 if (INTEGRAL_TYPE_P (type
)
9148 && integer_all_onesp (arg0
))
9149 return fold_build1 (BIT_NOT_EXPR
, type
, op1
);
9151 if (! FLOAT_TYPE_P (type
))
9153 if (integer_zerop (arg0
))
9154 return negate_expr (fold_convert (type
, arg1
));
9155 if (integer_zerop (arg1
))
9156 return non_lvalue (fold_convert (type
, arg0
));
9158 /* Fold A - (A & B) into ~B & A. */
9159 if (!TREE_SIDE_EFFECTS (arg0
)
9160 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
9162 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
9163 return fold_build2 (BIT_AND_EXPR
, type
,
9164 fold_build1 (BIT_NOT_EXPR
, type
,
9165 TREE_OPERAND (arg1
, 0)),
9167 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9168 return fold_build2 (BIT_AND_EXPR
, type
,
9169 fold_build1 (BIT_NOT_EXPR
, type
,
9170 TREE_OPERAND (arg1
, 1)),
9174 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9175 any power of 2 minus 1. */
9176 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9177 && TREE_CODE (arg1
) == BIT_AND_EXPR
9178 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9179 TREE_OPERAND (arg1
, 0), 0))
9181 tree mask0
= TREE_OPERAND (arg0
, 1);
9182 tree mask1
= TREE_OPERAND (arg1
, 1);
9183 tree tem
= fold_build1 (BIT_NOT_EXPR
, type
, mask0
);
9185 if (operand_equal_p (tem
, mask1
, 0))
9187 tem
= fold_build2 (BIT_XOR_EXPR
, type
,
9188 TREE_OPERAND (arg0
, 0), mask1
);
9189 return fold_build2 (MINUS_EXPR
, type
, tem
, mask1
);
9194 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9195 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
9196 return non_lvalue (fold_convert (type
, arg0
));
9198 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9199 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9200 (-ARG1 + ARG0) reduces to -ARG1. */
9201 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
9202 return negate_expr (fold_convert (type
, arg1
));
9204 /* Fold &x - &x. This can happen from &x.foo - &x.
9205 This is unsafe for certain floats even in non-IEEE formats.
9206 In IEEE, it is unsafe because it does wrong for NaNs.
9207 Also note that operand_equal_p is always false if an operand
9210 if ((! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
9211 && operand_equal_p (arg0
, arg1
, 0))
9212 return fold_convert (type
, integer_zero_node
);
9214 /* A - B -> A + (-B) if B is easily negatable. */
9215 if (negate_expr_p (arg1
)
9216 && ((FLOAT_TYPE_P (type
)
9217 /* Avoid this transformation if B is a positive REAL_CST. */
9218 && (TREE_CODE (arg1
) != REAL_CST
9219 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
9220 || INTEGRAL_TYPE_P (type
)))
9221 return fold_build2 (PLUS_EXPR
, type
,
9222 fold_convert (type
, arg0
),
9223 fold_convert (type
, negate_expr (arg1
)));
9225 /* Try folding difference of addresses. */
9229 if ((TREE_CODE (arg0
) == ADDR_EXPR
9230 || TREE_CODE (arg1
) == ADDR_EXPR
)
9231 && ptr_difference_const (arg0
, arg1
, &diff
))
9232 return build_int_cst_type (type
, diff
);
9235 /* Fold &a[i] - &a[j] to i-j. */
9236 if (TREE_CODE (arg0
) == ADDR_EXPR
9237 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9238 && TREE_CODE (arg1
) == ADDR_EXPR
9239 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9241 tree aref0
= TREE_OPERAND (arg0
, 0);
9242 tree aref1
= TREE_OPERAND (arg1
, 0);
9243 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
9244 TREE_OPERAND (aref1
, 0), 0))
9246 tree op0
= fold_convert (type
, TREE_OPERAND (aref0
, 1));
9247 tree op1
= fold_convert (type
, TREE_OPERAND (aref1
, 1));
9248 tree esz
= array_ref_element_size (aref0
);
9249 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
9250 return fold_build2 (MULT_EXPR
, type
, diff
,
9251 fold_convert (type
, esz
));
9256 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9257 of the array. Loop optimizer sometimes produce this type of
9259 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9261 tem
= try_move_mult_to_index (MINUS_EXPR
, arg0
, arg1
);
9263 return fold_convert (type
, tem
);
9266 if (flag_unsafe_math_optimizations
9267 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9268 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9269 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
9272 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9274 if ((TREE_CODE (arg0
) == MULT_EXPR
9275 || TREE_CODE (arg1
) == MULT_EXPR
)
9276 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
9278 tree tem
= fold_plusminus_mult_expr (code
, type
, arg0
, arg1
);
9286 /* (-A) * (-B) -> A * B */
9287 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
9288 return fold_build2 (MULT_EXPR
, type
,
9289 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9290 fold_convert (type
, negate_expr (arg1
)));
9291 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
9292 return fold_build2 (MULT_EXPR
, type
,
9293 fold_convert (type
, negate_expr (arg0
)),
9294 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
9296 if (! FLOAT_TYPE_P (type
))
9298 if (integer_zerop (arg1
))
9299 return omit_one_operand (type
, arg1
, arg0
);
9300 if (integer_onep (arg1
))
9301 return non_lvalue (fold_convert (type
, arg0
));
9302 /* Transform x * -1 into -x. */
9303 if (integer_all_onesp (arg1
))
9304 return fold_convert (type
, negate_expr (arg0
));
9305 /* Transform x * -C into -x * C if x is easily negatable. */
9306 if (TREE_CODE (arg1
) == INTEGER_CST
9307 && tree_int_cst_sgn (arg1
) == -1
9308 && negate_expr_p (arg0
)
9309 && (tem
= negate_expr (arg1
)) != arg1
9310 && !TREE_OVERFLOW (tem
))
9311 return fold_build2 (MULT_EXPR
, type
,
9312 negate_expr (arg0
), tem
);
9314 /* (a * (1 << b)) is (a << b) */
9315 if (TREE_CODE (arg1
) == LSHIFT_EXPR
9316 && integer_onep (TREE_OPERAND (arg1
, 0)))
9317 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
9318 TREE_OPERAND (arg1
, 1));
9319 if (TREE_CODE (arg0
) == LSHIFT_EXPR
9320 && integer_onep (TREE_OPERAND (arg0
, 0)))
9321 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
9322 TREE_OPERAND (arg0
, 1));
9324 if (TREE_CODE (arg1
) == INTEGER_CST
9325 && 0 != (tem
= extract_muldiv (op0
,
9326 fold_convert (type
, arg1
),
9328 return fold_convert (type
, tem
);
9330 /* Optimize z * conj(z) for integer complex numbers. */
9331 if (TREE_CODE (arg0
) == CONJ_EXPR
9332 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9333 return fold_mult_zconjz (type
, arg1
);
9334 if (TREE_CODE (arg1
) == CONJ_EXPR
9335 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9336 return fold_mult_zconjz (type
, arg0
);
9340 /* Maybe fold x * 0 to 0. The expressions aren't the same
9341 when x is NaN, since x * 0 is also NaN. Nor are they the
9342 same in modes with signed zeros, since multiplying a
9343 negative value by 0 gives -0, not +0. */
9344 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9345 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9346 && real_zerop (arg1
))
9347 return omit_one_operand (type
, arg1
, arg0
);
9348 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9349 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9350 && real_onep (arg1
))
9351 return non_lvalue (fold_convert (type
, arg0
));
9353 /* Transform x * -1.0 into -x. */
9354 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9355 && real_minus_onep (arg1
))
9356 return fold_convert (type
, negate_expr (arg0
));
9358 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9359 if (flag_unsafe_math_optimizations
9360 && TREE_CODE (arg0
) == RDIV_EXPR
9361 && TREE_CODE (arg1
) == REAL_CST
9362 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
9364 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
9367 return fold_build2 (RDIV_EXPR
, type
, tem
,
9368 TREE_OPERAND (arg0
, 1));
9371 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9372 if (operand_equal_p (arg0
, arg1
, 0))
9374 tree tem
= fold_strip_sign_ops (arg0
);
9375 if (tem
!= NULL_TREE
)
9377 tem
= fold_convert (type
, tem
);
9378 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
9382 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9383 This is not the same for NaNs or if singed zeros are
9385 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9386 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9387 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9388 && TREE_CODE (arg1
) == COMPLEX_CST
9389 && real_zerop (TREE_REALPART (arg1
)))
9391 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9392 if (real_onep (TREE_IMAGPART (arg1
)))
9393 return fold_build2 (COMPLEX_EXPR
, type
,
9394 negate_expr (fold_build1 (IMAGPART_EXPR
,
9396 fold_build1 (REALPART_EXPR
, rtype
, arg0
));
9397 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9398 return fold_build2 (COMPLEX_EXPR
, type
,
9399 fold_build1 (IMAGPART_EXPR
, rtype
, arg0
),
9400 negate_expr (fold_build1 (REALPART_EXPR
,
9404 /* Optimize z * conj(z) for floating point complex numbers.
9405 Guarded by flag_unsafe_math_optimizations as non-finite
9406 imaginary components don't produce scalar results. */
9407 if (flag_unsafe_math_optimizations
9408 && TREE_CODE (arg0
) == CONJ_EXPR
9409 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9410 return fold_mult_zconjz (type
, arg1
);
9411 if (flag_unsafe_math_optimizations
9412 && TREE_CODE (arg1
) == CONJ_EXPR
9413 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9414 return fold_mult_zconjz (type
, arg0
);
9416 if (flag_unsafe_math_optimizations
)
9418 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
9419 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
9421 /* Optimizations of root(...)*root(...). */
9422 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
9424 tree rootfn
, arg
, arglist
;
9425 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9426 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9428 /* Optimize sqrt(x)*sqrt(x) as x. */
9429 if (BUILTIN_SQRT_P (fcode0
)
9430 && operand_equal_p (arg00
, arg10
, 0)
9431 && ! HONOR_SNANS (TYPE_MODE (type
)))
9434 /* Optimize root(x)*root(y) as root(x*y). */
9435 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9436 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
9437 arglist
= build_tree_list (NULL_TREE
, arg
);
9438 return build_function_call_expr (rootfn
, arglist
);
9441 /* Optimize expN(x)*expN(y) as expN(x+y). */
9442 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
9444 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9445 tree arg
= fold_build2 (PLUS_EXPR
, type
,
9446 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9447 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
9448 tree arglist
= build_tree_list (NULL_TREE
, arg
);
9449 return build_function_call_expr (expfn
, arglist
);
9452 /* Optimizations of pow(...)*pow(...). */
9453 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
9454 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
9455 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
9457 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9458 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
9460 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9461 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
9464 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9465 if (operand_equal_p (arg01
, arg11
, 0))
9467 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9468 tree arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
9469 tree arglist
= tree_cons (NULL_TREE
, arg
,
9470 build_tree_list (NULL_TREE
,
9472 return build_function_call_expr (powfn
, arglist
);
9475 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9476 if (operand_equal_p (arg00
, arg10
, 0))
9478 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9479 tree arg
= fold_build2 (PLUS_EXPR
, type
, arg01
, arg11
);
9480 tree arglist
= tree_cons (NULL_TREE
, arg00
,
9481 build_tree_list (NULL_TREE
,
9483 return build_function_call_expr (powfn
, arglist
);
9487 /* Optimize tan(x)*cos(x) as sin(x). */
9488 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
9489 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
9490 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
9491 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
9492 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
9493 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
9494 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9495 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
9497 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
9499 if (sinfn
!= NULL_TREE
)
9500 return build_function_call_expr (sinfn
,
9501 TREE_OPERAND (arg0
, 1));
9504 /* Optimize x*pow(x,c) as pow(x,c+1). */
9505 if (fcode1
== BUILT_IN_POW
9506 || fcode1
== BUILT_IN_POWF
9507 || fcode1
== BUILT_IN_POWL
)
9509 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9510 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
9512 if (TREE_CODE (arg11
) == REAL_CST
9513 && !TREE_OVERFLOW (arg11
)
9514 && operand_equal_p (arg0
, arg10
, 0))
9516 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
9520 c
= TREE_REAL_CST (arg11
);
9521 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
9522 arg
= build_real (type
, c
);
9523 arglist
= build_tree_list (NULL_TREE
, arg
);
9524 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
9525 return build_function_call_expr (powfn
, arglist
);
9529 /* Optimize pow(x,c)*x as pow(x,c+1). */
9530 if (fcode0
== BUILT_IN_POW
9531 || fcode0
== BUILT_IN_POWF
9532 || fcode0
== BUILT_IN_POWL
)
9534 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9535 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
9537 if (TREE_CODE (arg01
) == REAL_CST
9538 && !TREE_OVERFLOW (arg01
)
9539 && operand_equal_p (arg1
, arg00
, 0))
9541 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9545 c
= TREE_REAL_CST (arg01
);
9546 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
9547 arg
= build_real (type
, c
);
9548 arglist
= build_tree_list (NULL_TREE
, arg
);
9549 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
9550 return build_function_call_expr (powfn
, arglist
);
9554 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9556 && operand_equal_p (arg0
, arg1
, 0))
9558 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
9562 tree arg
= build_real (type
, dconst2
);
9563 tree arglist
= build_tree_list (NULL_TREE
, arg
);
9564 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
9565 return build_function_call_expr (powfn
, arglist
);
9574 if (integer_all_onesp (arg1
))
9575 return omit_one_operand (type
, arg1
, arg0
);
9576 if (integer_zerop (arg1
))
9577 return non_lvalue (fold_convert (type
, arg0
));
9578 if (operand_equal_p (arg0
, arg1
, 0))
9579 return non_lvalue (fold_convert (type
, arg0
));
9582 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9583 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9585 t1
= build_int_cst_type (type
, -1);
9586 return omit_one_operand (type
, t1
, arg1
);
9590 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9591 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9593 t1
= build_int_cst_type (type
, -1);
9594 return omit_one_operand (type
, t1
, arg0
);
9597 /* Canonicalize (X & C1) | C2. */
9598 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9599 && TREE_CODE (arg1
) == INTEGER_CST
9600 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9602 unsigned HOST_WIDE_INT hi1
, lo1
, hi2
, lo2
, mlo
, mhi
;
9603 int width
= TYPE_PRECISION (type
);
9604 hi1
= TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1));
9605 lo1
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
9606 hi2
= TREE_INT_CST_HIGH (arg1
);
9607 lo2
= TREE_INT_CST_LOW (arg1
);
9609 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9610 if ((hi1
& hi2
) == hi1
&& (lo1
& lo2
) == lo1
)
9611 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9613 if (width
> HOST_BITS_PER_WIDE_INT
)
9615 mhi
= (unsigned HOST_WIDE_INT
) -1
9616 >> (2 * HOST_BITS_PER_WIDE_INT
- width
);
9622 mlo
= (unsigned HOST_WIDE_INT
) -1
9623 >> (HOST_BITS_PER_WIDE_INT
- width
);
9626 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9627 if ((~(hi1
| hi2
) & mhi
) == 0 && (~(lo1
| lo2
) & mlo
) == 0)
9628 return fold_build2 (BIT_IOR_EXPR
, type
,
9629 TREE_OPERAND (arg0
, 0), arg1
);
9631 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9634 if ((hi1
& ~hi2
) != hi1
|| (lo1
& ~lo2
) != lo1
)
9635 return fold_build2 (BIT_IOR_EXPR
, type
,
9636 fold_build2 (BIT_AND_EXPR
, type
,
9637 TREE_OPERAND (arg0
, 0),
9638 build_int_cst_wide (type
,
9644 /* (X & Y) | Y is (X, Y). */
9645 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9646 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9647 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9648 /* (X & Y) | X is (Y, X). */
9649 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9650 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9651 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9652 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 1));
9653 /* X | (X & Y) is (Y, X). */
9654 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9655 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
9656 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
9657 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 1));
9658 /* X | (Y & X) is (Y, X). */
9659 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9660 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9661 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9662 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 0));
9664 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
9665 if (t1
!= NULL_TREE
)
9668 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9670 This results in more efficient code for machines without a NAND
9671 instruction. Combine will canonicalize to the first form
9672 which will allow use of NAND instructions provided by the
9673 backend if they exist. */
9674 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9675 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9677 return fold_build1 (BIT_NOT_EXPR
, type
,
9678 build2 (BIT_AND_EXPR
, type
,
9679 TREE_OPERAND (arg0
, 0),
9680 TREE_OPERAND (arg1
, 0)));
9683 /* See if this can be simplified into a rotate first. If that
9684 is unsuccessful continue in the association code. */
9688 if (integer_zerop (arg1
))
9689 return non_lvalue (fold_convert (type
, arg0
));
9690 if (integer_all_onesp (arg1
))
9691 return fold_build1 (BIT_NOT_EXPR
, type
, arg0
);
9692 if (operand_equal_p (arg0
, arg1
, 0))
9693 return omit_one_operand (type
, integer_zero_node
, arg0
);
9696 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9697 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9699 t1
= build_int_cst_type (type
, -1);
9700 return omit_one_operand (type
, t1
, arg1
);
9704 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9705 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9707 t1
= build_int_cst_type (type
, -1);
9708 return omit_one_operand (type
, t1
, arg0
);
9711 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9712 with a constant, and the two constants have no bits in common,
9713 we should treat this as a BIT_IOR_EXPR since this may produce more
9715 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9716 && TREE_CODE (arg1
) == BIT_AND_EXPR
9717 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9718 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9719 && integer_zerop (const_binop (BIT_AND_EXPR
,
9720 TREE_OPERAND (arg0
, 1),
9721 TREE_OPERAND (arg1
, 1), 0)))
9723 code
= BIT_IOR_EXPR
;
9727 /* (X | Y) ^ X -> Y & ~ X*/
9728 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9729 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9731 tree t2
= TREE_OPERAND (arg0
, 1);
9732 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
9734 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9735 fold_convert (type
, t1
));
9739 /* (Y | X) ^ X -> Y & ~ X*/
9740 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9741 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9743 tree t2
= TREE_OPERAND (arg0
, 0);
9744 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
9746 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9747 fold_convert (type
, t1
));
9751 /* X ^ (X | Y) -> Y & ~ X*/
9752 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9753 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
9755 tree t2
= TREE_OPERAND (arg1
, 1);
9756 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
9758 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9759 fold_convert (type
, t1
));
9763 /* X ^ (Y | X) -> Y & ~ X*/
9764 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9765 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
9767 tree t2
= TREE_OPERAND (arg1
, 0);
9768 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
9770 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9771 fold_convert (type
, t1
));
9775 /* Convert ~X ^ ~Y to X ^ Y. */
9776 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9777 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9778 return fold_build2 (code
, type
,
9779 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9780 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
9782 /* Convert ~X ^ C to X ^ ~C. */
9783 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9784 && TREE_CODE (arg1
) == INTEGER_CST
)
9785 return fold_build2 (code
, type
,
9786 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9787 fold_build1 (BIT_NOT_EXPR
, type
, arg1
));
9789 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9790 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9791 && integer_onep (TREE_OPERAND (arg0
, 1))
9792 && integer_onep (arg1
))
9793 return fold_build2 (EQ_EXPR
, type
, arg0
,
9794 build_int_cst (TREE_TYPE (arg0
), 0));
9796 /* Fold (X & Y) ^ Y as ~X & Y. */
9797 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9798 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9800 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
9801 return fold_build2 (BIT_AND_EXPR
, type
,
9802 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9803 fold_convert (type
, arg1
));
9805 /* Fold (X & Y) ^ X as ~Y & X. */
9806 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9807 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9808 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9810 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 1));
9811 return fold_build2 (BIT_AND_EXPR
, type
,
9812 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9813 fold_convert (type
, arg1
));
9815 /* Fold X ^ (X & Y) as X & ~Y. */
9816 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9817 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9819 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 1));
9820 return fold_build2 (BIT_AND_EXPR
, type
,
9821 fold_convert (type
, arg0
),
9822 fold_build1 (BIT_NOT_EXPR
, type
, tem
));
9824 /* Fold X ^ (Y & X) as ~Y & X. */
9825 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9826 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9827 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9829 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 0));
9830 return fold_build2 (BIT_AND_EXPR
, type
,
9831 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9832 fold_convert (type
, arg0
));
9835 /* See if this can be simplified into a rotate first. If that
9836 is unsuccessful continue in the association code. */
9840 if (integer_all_onesp (arg1
))
9841 return non_lvalue (fold_convert (type
, arg0
));
9842 if (integer_zerop (arg1
))
9843 return omit_one_operand (type
, arg1
, arg0
);
9844 if (operand_equal_p (arg0
, arg1
, 0))
9845 return non_lvalue (fold_convert (type
, arg0
));
9847 /* ~X & X is always zero. */
9848 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9849 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9850 return omit_one_operand (type
, integer_zero_node
, arg1
);
9852 /* X & ~X is always zero. */
9853 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9854 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9855 return omit_one_operand (type
, integer_zero_node
, arg0
);
9857 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9858 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9859 && TREE_CODE (arg1
) == INTEGER_CST
9860 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9861 return fold_build2 (BIT_IOR_EXPR
, type
,
9862 fold_build2 (BIT_AND_EXPR
, type
,
9863 TREE_OPERAND (arg0
, 0), arg1
),
9864 fold_build2 (BIT_AND_EXPR
, type
,
9865 TREE_OPERAND (arg0
, 1), arg1
));
9867 /* (X | Y) & Y is (X, Y). */
9868 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9869 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9870 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9871 /* (X | Y) & X is (Y, X). */
9872 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9873 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9874 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9875 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 1));
9876 /* X & (X | Y) is (Y, X). */
9877 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9878 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
9879 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
9880 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 1));
9881 /* X & (Y | X) is (Y, X). */
9882 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9883 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9884 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9885 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 0));
9887 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9888 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9889 && integer_onep (TREE_OPERAND (arg0
, 1))
9890 && integer_onep (arg1
))
9892 tem
= TREE_OPERAND (arg0
, 0);
9893 return fold_build2 (EQ_EXPR
, type
,
9894 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
9895 build_int_cst (TREE_TYPE (tem
), 1)),
9896 build_int_cst (TREE_TYPE (tem
), 0));
9898 /* Fold ~X & 1 as (X & 1) == 0. */
9899 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9900 && integer_onep (arg1
))
9902 tem
= TREE_OPERAND (arg0
, 0);
9903 return fold_build2 (EQ_EXPR
, type
,
9904 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
9905 build_int_cst (TREE_TYPE (tem
), 1)),
9906 build_int_cst (TREE_TYPE (tem
), 0));
9909 /* Fold (X ^ Y) & Y as ~X & Y. */
9910 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9911 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9913 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
9914 return fold_build2 (BIT_AND_EXPR
, type
,
9915 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9916 fold_convert (type
, arg1
));
9918 /* Fold (X ^ Y) & X as ~Y & X. */
9919 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9920 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9921 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9923 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 1));
9924 return fold_build2 (BIT_AND_EXPR
, type
,
9925 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9926 fold_convert (type
, arg1
));
9928 /* Fold X & (X ^ Y) as X & ~Y. */
9929 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
9930 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9932 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 1));
9933 return fold_build2 (BIT_AND_EXPR
, type
,
9934 fold_convert (type
, arg0
),
9935 fold_build1 (BIT_NOT_EXPR
, type
, tem
));
9937 /* Fold X & (Y ^ X) as ~Y & X. */
9938 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
9939 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9940 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9942 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 0));
9943 return fold_build2 (BIT_AND_EXPR
, type
,
9944 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9945 fold_convert (type
, arg0
));
9948 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
9949 if (t1
!= NULL_TREE
)
9951 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9952 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
9953 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
9956 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
9958 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
9959 && (~TREE_INT_CST_LOW (arg1
)
9960 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
9961 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
9964 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9966 This results in more efficient code for machines without a NOR
9967 instruction. Combine will canonicalize to the first form
9968 which will allow use of NOR instructions provided by the
9969 backend if they exist. */
9970 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9971 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9973 return fold_build1 (BIT_NOT_EXPR
, type
,
9974 build2 (BIT_IOR_EXPR
, type
,
9975 TREE_OPERAND (arg0
, 0),
9976 TREE_OPERAND (arg1
, 0)));
9982 /* Don't touch a floating-point divide by zero unless the mode
9983 of the constant can represent infinity. */
9984 if (TREE_CODE (arg1
) == REAL_CST
9985 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
9986 && real_zerop (arg1
))
9989 /* Optimize A / A to 1.0 if we don't care about
9990 NaNs or Infinities. Skip the transformation
9991 for non-real operands. */
9992 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9993 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9994 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
9995 && operand_equal_p (arg0
, arg1
, 0))
9997 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
9999 return omit_two_operands (type
, r
, arg0
, arg1
);
10002 /* The complex version of the above A / A optimization. */
10003 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10004 && operand_equal_p (arg0
, arg1
, 0))
10006 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
10007 if (! HONOR_NANS (TYPE_MODE (elem_type
))
10008 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
10010 tree r
= build_real (elem_type
, dconst1
);
10011 /* omit_two_operands will call fold_convert for us. */
10012 return omit_two_operands (type
, r
, arg0
, arg1
);
10016 /* (-A) / (-B) -> A / B */
10017 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10018 return fold_build2 (RDIV_EXPR
, type
,
10019 TREE_OPERAND (arg0
, 0),
10020 negate_expr (arg1
));
10021 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10022 return fold_build2 (RDIV_EXPR
, type
,
10023 negate_expr (arg0
),
10024 TREE_OPERAND (arg1
, 0));
10026 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10027 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10028 && real_onep (arg1
))
10029 return non_lvalue (fold_convert (type
, arg0
));
10031 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10032 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10033 && real_minus_onep (arg1
))
10034 return non_lvalue (fold_convert (type
, negate_expr (arg0
)));
10036 /* If ARG1 is a constant, we can convert this to a multiply by the
10037 reciprocal. This does not have the same rounding properties,
10038 so only do this if -funsafe-math-optimizations. We can actually
10039 always safely do it if ARG1 is a power of two, but it's hard to
10040 tell if it is or not in a portable manner. */
10041 if (TREE_CODE (arg1
) == REAL_CST
)
10043 if (flag_unsafe_math_optimizations
10044 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
10046 return fold_build2 (MULT_EXPR
, type
, arg0
, tem
);
10047 /* Find the reciprocal if optimizing and the result is exact. */
10051 r
= TREE_REAL_CST (arg1
);
10052 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
10054 tem
= build_real (type
, r
);
10055 return fold_build2 (MULT_EXPR
, type
,
10056 fold_convert (type
, arg0
), tem
);
10060 /* Convert A/B/C to A/(B*C). */
10061 if (flag_unsafe_math_optimizations
10062 && TREE_CODE (arg0
) == RDIV_EXPR
)
10063 return fold_build2 (RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10064 fold_build2 (MULT_EXPR
, type
,
10065 TREE_OPERAND (arg0
, 1), arg1
));
10067 /* Convert A/(B/C) to (A/B)*C. */
10068 if (flag_unsafe_math_optimizations
10069 && TREE_CODE (arg1
) == RDIV_EXPR
)
10070 return fold_build2 (MULT_EXPR
, type
,
10071 fold_build2 (RDIV_EXPR
, type
, arg0
,
10072 TREE_OPERAND (arg1
, 0)),
10073 TREE_OPERAND (arg1
, 1));
10075 /* Convert C1/(X*C2) into (C1/C2)/X. */
10076 if (flag_unsafe_math_optimizations
10077 && TREE_CODE (arg1
) == MULT_EXPR
10078 && TREE_CODE (arg0
) == REAL_CST
10079 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
10081 tree tem
= const_binop (RDIV_EXPR
, arg0
,
10082 TREE_OPERAND (arg1
, 1), 0);
10084 return fold_build2 (RDIV_EXPR
, type
, tem
,
10085 TREE_OPERAND (arg1
, 0));
10088 if (flag_unsafe_math_optimizations
)
10090 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10091 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10093 /* Optimize sin(x)/cos(x) as tan(x). */
10094 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
10095 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
10096 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
10097 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
10098 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
10100 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10102 if (tanfn
!= NULL_TREE
)
10103 return build_function_call_expr (tanfn
,
10104 TREE_OPERAND (arg0
, 1));
10107 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10108 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
10109 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
10110 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
10111 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
10112 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
10114 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
10116 if (tanfn
!= NULL_TREE
)
10118 tree tmp
= TREE_OPERAND (arg0
, 1);
10119 tmp
= build_function_call_expr (tanfn
, tmp
);
10120 return fold_build2 (RDIV_EXPR
, type
,
10121 build_real (type
, dconst1
), tmp
);
10125 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10126 NaNs or Infinities. */
10127 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
10128 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
10129 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
10131 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
10132 tree arg01
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
10134 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
10135 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
10136 && operand_equal_p (arg00
, arg01
, 0))
10138 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10140 if (cosfn
!= NULL_TREE
)
10141 return build_function_call_expr (cosfn
,
10142 TREE_OPERAND (arg0
, 1));
10146 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10147 NaNs or Infinities. */
10148 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
10149 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
10150 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
10152 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
10153 tree arg01
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
10155 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
10156 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
10157 && operand_equal_p (arg00
, arg01
, 0))
10159 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
10161 if (cosfn
!= NULL_TREE
)
10163 tree tmp
= TREE_OPERAND (arg0
, 1);
10164 tmp
= build_function_call_expr (cosfn
, tmp
);
10165 return fold_build2 (RDIV_EXPR
, type
,
10166 build_real (type
, dconst1
),
10172 /* Optimize pow(x,c)/x as pow(x,c-1). */
10173 if (fcode0
== BUILT_IN_POW
10174 || fcode0
== BUILT_IN_POWF
10175 || fcode0
== BUILT_IN_POWL
)
10177 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
10178 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
, 1)));
10179 if (TREE_CODE (arg01
) == REAL_CST
10180 && !TREE_OVERFLOW (arg01
)
10181 && operand_equal_p (arg1
, arg00
, 0))
10183 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10187 c
= TREE_REAL_CST (arg01
);
10188 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
10189 arg
= build_real (type
, c
);
10190 arglist
= build_tree_list (NULL_TREE
, arg
);
10191 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
10192 return build_function_call_expr (powfn
, arglist
);
10196 /* Optimize x/expN(y) into x*expN(-y). */
10197 if (BUILTIN_EXPONENT_P (fcode1
))
10199 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
10200 tree arg
= negate_expr (TREE_VALUE (TREE_OPERAND (arg1
, 1)));
10201 tree arglist
= build_tree_list (NULL_TREE
,
10202 fold_convert (type
, arg
));
10203 arg1
= build_function_call_expr (expfn
, arglist
);
10204 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
10207 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10208 if (fcode1
== BUILT_IN_POW
10209 || fcode1
== BUILT_IN_POWF
10210 || fcode1
== BUILT_IN_POWL
)
10212 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
10213 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
10214 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
, 1)));
10215 tree neg11
= fold_convert (type
, negate_expr (arg11
));
10216 tree arglist
= tree_cons(NULL_TREE
, arg10
,
10217 build_tree_list (NULL_TREE
, neg11
));
10218 arg1
= build_function_call_expr (powfn
, arglist
);
10219 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
10224 case TRUNC_DIV_EXPR
:
10225 case FLOOR_DIV_EXPR
:
10226 /* Simplify A / (B << N) where A and B are positive and B is
10227 a power of 2, to A >> (N + log2(B)). */
10228 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10229 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
)))
10231 tree sval
= TREE_OPERAND (arg1
, 0);
10232 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
10234 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
10235 unsigned long pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
10237 sh_cnt
= fold_build2 (PLUS_EXPR
, TREE_TYPE (sh_cnt
),
10238 sh_cnt
, build_int_cst (NULL_TREE
, pow2
));
10239 return fold_build2 (RSHIFT_EXPR
, type
,
10240 fold_convert (type
, arg0
), sh_cnt
);
10245 case ROUND_DIV_EXPR
:
10246 case CEIL_DIV_EXPR
:
10247 case EXACT_DIV_EXPR
:
10248 if (integer_onep (arg1
))
10249 return non_lvalue (fold_convert (type
, arg0
));
10250 if (integer_zerop (arg1
))
10252 /* X / -1 is -X. */
10253 if (!TYPE_UNSIGNED (type
)
10254 && TREE_CODE (arg1
) == INTEGER_CST
10255 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
10256 && TREE_INT_CST_HIGH (arg1
) == -1)
10257 return fold_convert (type
, negate_expr (arg0
));
10259 /* Convert -A / -B to A / B when the type is signed and overflow is
10261 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
10262 && TREE_CODE (arg0
) == NEGATE_EXPR
10263 && negate_expr_p (arg1
))
10264 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
10265 negate_expr (arg1
));
10266 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
10267 && TREE_CODE (arg1
) == NEGATE_EXPR
10268 && negate_expr_p (arg0
))
10269 return fold_build2 (code
, type
, negate_expr (arg0
),
10270 TREE_OPERAND (arg1
, 0));
10272 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10273 operation, EXACT_DIV_EXPR.
10275 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10276 At one time others generated faster code, it's not clear if they do
10277 after the last round to changes to the DIV code in expmed.c. */
10278 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
10279 && multiple_of_p (type
, arg0
, arg1
))
10280 return fold_build2 (EXACT_DIV_EXPR
, type
, arg0
, arg1
);
10282 if (TREE_CODE (arg1
) == INTEGER_CST
10283 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
10284 return fold_convert (type
, tem
);
10288 case CEIL_MOD_EXPR
:
10289 case FLOOR_MOD_EXPR
:
10290 case ROUND_MOD_EXPR
:
10291 case TRUNC_MOD_EXPR
:
10292 /* X % 1 is always zero, but be sure to preserve any side
10294 if (integer_onep (arg1
))
10295 return omit_one_operand (type
, integer_zero_node
, arg0
);
10297 /* X % 0, return X % 0 unchanged so that we can get the
10298 proper warnings and errors. */
10299 if (integer_zerop (arg1
))
10302 /* 0 % X is always zero, but be sure to preserve any side
10303 effects in X. Place this after checking for X == 0. */
10304 if (integer_zerop (arg0
))
10305 return omit_one_operand (type
, integer_zero_node
, arg1
);
10307 /* X % -1 is zero. */
10308 if (!TYPE_UNSIGNED (type
)
10309 && TREE_CODE (arg1
) == INTEGER_CST
10310 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
10311 && TREE_INT_CST_HIGH (arg1
) == -1)
10312 return omit_one_operand (type
, integer_zero_node
, arg0
);
10314 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10315 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10316 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
10317 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
)))
10320 /* Also optimize A % (C << N) where C is a power of 2,
10321 to A & ((C << N) - 1). */
10322 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
10323 c
= TREE_OPERAND (arg1
, 0);
10325 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
10327 tree mask
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
10328 build_int_cst (TREE_TYPE (arg1
), 1));
10329 return fold_build2 (BIT_AND_EXPR
, type
,
10330 fold_convert (type
, arg0
),
10331 fold_convert (type
, mask
));
10335 /* X % -C is the same as X % C. */
10336 if (code
== TRUNC_MOD_EXPR
10337 && !TYPE_UNSIGNED (type
)
10338 && TREE_CODE (arg1
) == INTEGER_CST
10339 && !TREE_OVERFLOW (arg1
)
10340 && TREE_INT_CST_HIGH (arg1
) < 0
10342 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10343 && !sign_bit_p (arg1
, arg1
))
10344 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
10345 fold_convert (type
, negate_expr (arg1
)));
10347 /* X % -Y is the same as X % Y. */
10348 if (code
== TRUNC_MOD_EXPR
10349 && !TYPE_UNSIGNED (type
)
10350 && TREE_CODE (arg1
) == NEGATE_EXPR
10352 return fold_build2 (code
, type
, fold_convert (type
, arg0
),
10353 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
10355 if (TREE_CODE (arg1
) == INTEGER_CST
10356 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
10357 return fold_convert (type
, tem
);
10363 if (integer_all_onesp (arg0
))
10364 return omit_one_operand (type
, arg0
, arg1
);
10368 /* Optimize -1 >> x for arithmetic right shifts. */
10369 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
))
10370 return omit_one_operand (type
, arg0
, arg1
);
10371 /* ... fall through ... */
10375 if (integer_zerop (arg1
))
10376 return non_lvalue (fold_convert (type
, arg0
));
10377 if (integer_zerop (arg0
))
10378 return omit_one_operand (type
, arg0
, arg1
);
10380 /* Since negative shift count is not well-defined,
10381 don't try to compute it in the compiler. */
10382 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
10385 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10386 if (TREE_CODE (op0
) == code
&& host_integerp (arg1
, false)
10387 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
10388 && host_integerp (TREE_OPERAND (arg0
, 1), false)
10389 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
10391 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
10392 + TREE_INT_CST_LOW (arg1
));
10394 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10395 being well defined. */
10396 if (low
>= TYPE_PRECISION (type
))
10398 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
10399 low
= low
% TYPE_PRECISION (type
);
10400 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
10401 return build_int_cst (type
, 0);
10403 low
= TYPE_PRECISION (type
) - 1;
10406 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
10407 build_int_cst (type
, low
));
10410 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10411 into x & ((unsigned)-1 >> c) for unsigned types. */
10412 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
10413 || (TYPE_UNSIGNED (type
)
10414 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
10415 && host_integerp (arg1
, false)
10416 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
10417 && host_integerp (TREE_OPERAND (arg0
, 1), false)
10418 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
10420 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
10421 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
10427 arg00
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
10429 lshift
= build_int_cst (type
, -1);
10430 lshift
= int_const_binop (code
, lshift
, arg1
, 0);
10432 return fold_build2 (BIT_AND_EXPR
, type
, arg00
, lshift
);
10436 /* Rewrite an LROTATE_EXPR by a constant into an
10437 RROTATE_EXPR by a new constant. */
10438 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
10440 tree tem
= build_int_cst (TREE_TYPE (arg1
),
10441 GET_MODE_BITSIZE (TYPE_MODE (type
)));
10442 tem
= const_binop (MINUS_EXPR
, tem
, arg1
, 0);
10443 return fold_build2 (RROTATE_EXPR
, type
, arg0
, tem
);
10446 /* If we have a rotate of a bit operation with the rotate count and
10447 the second operand of the bit operation both constant,
10448 permute the two operations. */
10449 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10450 && (TREE_CODE (arg0
) == BIT_AND_EXPR
10451 || TREE_CODE (arg0
) == BIT_IOR_EXPR
10452 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
10453 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10454 return fold_build2 (TREE_CODE (arg0
), type
,
10455 fold_build2 (code
, type
,
10456 TREE_OPERAND (arg0
, 0), arg1
),
10457 fold_build2 (code
, type
,
10458 TREE_OPERAND (arg0
, 1), arg1
));
10460 /* Two consecutive rotates adding up to the width of the mode can
10462 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
10463 && TREE_CODE (arg0
) == RROTATE_EXPR
10464 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10465 && TREE_INT_CST_HIGH (arg1
) == 0
10466 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
10467 && ((TREE_INT_CST_LOW (arg1
)
10468 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
10469 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type
))))
10470 return TREE_OPERAND (arg0
, 0);
10475 if (operand_equal_p (arg0
, arg1
, 0))
10476 return omit_one_operand (type
, arg0
, arg1
);
10477 if (INTEGRAL_TYPE_P (type
)
10478 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
10479 return omit_one_operand (type
, arg1
, arg0
);
10480 tem
= fold_minmax (MIN_EXPR
, type
, arg0
, arg1
);
10486 if (operand_equal_p (arg0
, arg1
, 0))
10487 return omit_one_operand (type
, arg0
, arg1
);
10488 if (INTEGRAL_TYPE_P (type
)
10489 && TYPE_MAX_VALUE (type
)
10490 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
10491 return omit_one_operand (type
, arg1
, arg0
);
10492 tem
= fold_minmax (MAX_EXPR
, type
, arg0
, arg1
);
10497 case TRUTH_ANDIF_EXPR
:
10498 /* Note that the operands of this must be ints
10499 and their values must be 0 or 1.
10500 ("true" is a fixed value perhaps depending on the language.) */
10501 /* If first arg is constant zero, return it. */
10502 if (integer_zerop (arg0
))
10503 return fold_convert (type
, arg0
);
10504 case TRUTH_AND_EXPR
:
10505 /* If either arg is constant true, drop it. */
10506 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10507 return non_lvalue (fold_convert (type
, arg1
));
10508 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
10509 /* Preserve sequence points. */
10510 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10511 return non_lvalue (fold_convert (type
, arg0
));
10512 /* If second arg is constant zero, result is zero, but first arg
10513 must be evaluated. */
10514 if (integer_zerop (arg1
))
10515 return omit_one_operand (type
, arg1
, arg0
);
10516 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10517 case will be handled here. */
10518 if (integer_zerop (arg0
))
10519 return omit_one_operand (type
, arg0
, arg1
);
10521 /* !X && X is always false. */
10522 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10523 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10524 return omit_one_operand (type
, integer_zero_node
, arg1
);
10525 /* X && !X is always false. */
10526 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10527 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10528 return omit_one_operand (type
, integer_zero_node
, arg0
);
10530 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10531 means A >= Y && A != MAX, but in this case we know that
10534 if (!TREE_SIDE_EFFECTS (arg0
)
10535 && !TREE_SIDE_EFFECTS (arg1
))
10537 tem
= fold_to_nonsharp_ineq_using_bound (arg0
, arg1
);
10538 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
10539 return fold_build2 (code
, type
, tem
, arg1
);
10541 tem
= fold_to_nonsharp_ineq_using_bound (arg1
, arg0
);
10542 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
10543 return fold_build2 (code
, type
, arg0
, tem
);
10547 /* We only do these simplifications if we are optimizing. */
10551 /* Check for things like (A || B) && (A || C). We can convert this
10552 to A || (B && C). Note that either operator can be any of the four
10553 truth and/or operations and the transformation will still be
10554 valid. Also note that we only care about order for the
10555 ANDIF and ORIF operators. If B contains side effects, this
10556 might change the truth-value of A. */
10557 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
10558 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
10559 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
10560 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
10561 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
10562 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
10564 tree a00
= TREE_OPERAND (arg0
, 0);
10565 tree a01
= TREE_OPERAND (arg0
, 1);
10566 tree a10
= TREE_OPERAND (arg1
, 0);
10567 tree a11
= TREE_OPERAND (arg1
, 1);
10568 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
10569 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
10570 && (code
== TRUTH_AND_EXPR
10571 || code
== TRUTH_OR_EXPR
));
10573 if (operand_equal_p (a00
, a10
, 0))
10574 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
10575 fold_build2 (code
, type
, a01
, a11
));
10576 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
10577 return fold_build2 (TREE_CODE (arg0
), type
, a00
,
10578 fold_build2 (code
, type
, a01
, a10
));
10579 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
10580 return fold_build2 (TREE_CODE (arg0
), type
, a01
,
10581 fold_build2 (code
, type
, a00
, a11
));
10583 /* This case if tricky because we must either have commutative
10584 operators or else A10 must not have side-effects. */
10586 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
10587 && operand_equal_p (a01
, a11
, 0))
10588 return fold_build2 (TREE_CODE (arg0
), type
,
10589 fold_build2 (code
, type
, a00
, a10
),
10593 /* See if we can build a range comparison. */
10594 if (0 != (tem
= fold_range_test (code
, type
, op0
, op1
)))
10597 /* Check for the possibility of merging component references. If our
10598 lhs is another similar operation, try to merge its rhs with our
10599 rhs. Then try to merge our lhs and rhs. */
10600 if (TREE_CODE (arg0
) == code
10601 && 0 != (tem
= fold_truthop (code
, type
,
10602 TREE_OPERAND (arg0
, 1), arg1
)))
10603 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
10605 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
10610 case TRUTH_ORIF_EXPR
:
10611 /* Note that the operands of this must be ints
10612 and their values must be 0 or true.
10613 ("true" is a fixed value perhaps depending on the language.) */
10614 /* If first arg is constant true, return it. */
10615 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10616 return fold_convert (type
, arg0
);
10617 case TRUTH_OR_EXPR
:
10618 /* If either arg is constant zero, drop it. */
10619 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
10620 return non_lvalue (fold_convert (type
, arg1
));
10621 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
10622 /* Preserve sequence points. */
10623 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
10624 return non_lvalue (fold_convert (type
, arg0
));
10625 /* If second arg is constant true, result is true, but we must
10626 evaluate first arg. */
10627 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
10628 return omit_one_operand (type
, arg1
, arg0
);
10629 /* Likewise for first arg, but note this only occurs here for
10631 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
10632 return omit_one_operand (type
, arg0
, arg1
);
10634 /* !X || X is always true. */
10635 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10636 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10637 return omit_one_operand (type
, integer_one_node
, arg1
);
10638 /* X || !X is always true. */
10639 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10640 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10641 return omit_one_operand (type
, integer_one_node
, arg0
);
10645 case TRUTH_XOR_EXPR
:
10646 /* If the second arg is constant zero, drop it. */
10647 if (integer_zerop (arg1
))
10648 return non_lvalue (fold_convert (type
, arg0
));
10649 /* If the second arg is constant true, this is a logical inversion. */
10650 if (integer_onep (arg1
))
10652 /* Only call invert_truthvalue if operand is a truth value. */
10653 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
10654 tem
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
10656 tem
= invert_truthvalue (arg0
);
10657 return non_lvalue (fold_convert (type
, tem
));
10659 /* Identical arguments cancel to zero. */
10660 if (operand_equal_p (arg0
, arg1
, 0))
10661 return omit_one_operand (type
, integer_zero_node
, arg0
);
10663 /* !X ^ X is always true. */
10664 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
10665 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10666 return omit_one_operand (type
, integer_one_node
, arg1
);
10668 /* X ^ !X is always true. */
10669 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
10670 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10671 return omit_one_operand (type
, integer_one_node
, arg0
);
10677 tem
= fold_comparison (code
, type
, op0
, op1
);
10678 if (tem
!= NULL_TREE
)
10681 /* bool_var != 0 becomes bool_var. */
10682 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10683 && code
== NE_EXPR
)
10684 return non_lvalue (fold_convert (type
, arg0
));
10686 /* bool_var == 1 becomes bool_var. */
10687 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10688 && code
== EQ_EXPR
)
10689 return non_lvalue (fold_convert (type
, arg0
));
10691 /* bool_var != 1 becomes !bool_var. */
10692 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
10693 && code
== NE_EXPR
)
10694 return fold_build1 (TRUTH_NOT_EXPR
, type
, arg0
);
10696 /* bool_var == 0 becomes !bool_var. */
10697 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
10698 && code
== EQ_EXPR
)
10699 return fold_build1 (TRUTH_NOT_EXPR
, type
, arg0
);
10701 /* If this is an equality comparison of the address of a non-weak
10702 object against zero, then we know the result. */
10703 if (TREE_CODE (arg0
) == ADDR_EXPR
10704 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
10705 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
10706 && integer_zerop (arg1
))
10707 return constant_boolean_node (code
!= EQ_EXPR
, type
);
10709 /* If this is an equality comparison of the address of two non-weak,
10710 unaliased symbols neither of which are extern (since we do not
10711 have access to attributes for externs), then we know the result. */
10712 if (TREE_CODE (arg0
) == ADDR_EXPR
10713 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
10714 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
10715 && ! lookup_attribute ("alias",
10716 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
10717 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
10718 && TREE_CODE (arg1
) == ADDR_EXPR
10719 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
10720 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
10721 && ! lookup_attribute ("alias",
10722 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
10723 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
10725 /* We know that we're looking at the address of two
10726 non-weak, unaliased, static _DECL nodes.
10728 It is both wasteful and incorrect to call operand_equal_p
10729 to compare the two ADDR_EXPR nodes. It is wasteful in that
10730 all we need to do is test pointer equality for the arguments
10731 to the two ADDR_EXPR nodes. It is incorrect to use
10732 operand_equal_p as that function is NOT equivalent to a
10733 C equality test. It can in fact return false for two
10734 objects which would test as equal using the C equality
10736 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
10737 return constant_boolean_node (equal
10738 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
10742 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10743 a MINUS_EXPR of a constant, we can convert it into a comparison with
10744 a revised constant as long as no overflow occurs. */
10745 if (TREE_CODE (arg1
) == INTEGER_CST
10746 && (TREE_CODE (arg0
) == PLUS_EXPR
10747 || TREE_CODE (arg0
) == MINUS_EXPR
)
10748 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10749 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
10750 ? MINUS_EXPR
: PLUS_EXPR
,
10751 fold_convert (TREE_TYPE (arg0
), arg1
),
10752 TREE_OPERAND (arg0
, 1), 0))
10753 && !TREE_OVERFLOW (tem
))
10754 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
10756 /* Similarly for a NEGATE_EXPR. */
10757 if (TREE_CODE (arg0
) == NEGATE_EXPR
10758 && TREE_CODE (arg1
) == INTEGER_CST
10759 && 0 != (tem
= negate_expr (arg1
))
10760 && TREE_CODE (tem
) == INTEGER_CST
10761 && !TREE_OVERFLOW (tem
))
10762 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
10764 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10765 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
10766 && TREE_CODE (arg1
) == INTEGER_CST
10767 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10768 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
10769 fold_build2 (BIT_XOR_EXPR
, TREE_TYPE (arg0
),
10770 fold_convert (TREE_TYPE (arg0
), arg1
),
10771 TREE_OPERAND (arg0
, 1)));
10773 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10774 for !=. Don't do this for ordered comparisons due to overflow. */
10775 if (TREE_CODE (arg0
) == MINUS_EXPR
10776 && integer_zerop (arg1
))
10777 return fold_build2 (code
, type
,
10778 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
10780 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10781 if (TREE_CODE (arg0
) == ABS_EXPR
10782 && (integer_zerop (arg1
) || real_zerop (arg1
)))
10783 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
10785 /* If this is an EQ or NE comparison with zero and ARG0 is
10786 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10787 two operations, but the latter can be done in one less insn
10788 on machines that have only two-operand insns or on which a
10789 constant cannot be the first operand. */
10790 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10791 && integer_zerop (arg1
))
10793 tree arg00
= TREE_OPERAND (arg0
, 0);
10794 tree arg01
= TREE_OPERAND (arg0
, 1);
10795 if (TREE_CODE (arg00
) == LSHIFT_EXPR
10796 && integer_onep (TREE_OPERAND (arg00
, 0)))
10798 fold_build2 (code
, type
,
10799 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
10800 build2 (RSHIFT_EXPR
, TREE_TYPE (arg00
),
10801 arg01
, TREE_OPERAND (arg00
, 1)),
10802 fold_convert (TREE_TYPE (arg0
),
10803 integer_one_node
)),
10805 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
10806 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
10808 fold_build2 (code
, type
,
10809 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
10810 build2 (RSHIFT_EXPR
, TREE_TYPE (arg01
),
10811 arg00
, TREE_OPERAND (arg01
, 1)),
10812 fold_convert (TREE_TYPE (arg0
),
10813 integer_one_node
)),
10817 /* If this is an NE or EQ comparison of zero against the result of a
10818 signed MOD operation whose second operand is a power of 2, make
10819 the MOD operation unsigned since it is simpler and equivalent. */
10820 if (integer_zerop (arg1
)
10821 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
10822 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
10823 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
10824 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
10825 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
10826 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
10828 tree newtype
= lang_hooks
.types
.unsigned_type (TREE_TYPE (arg0
));
10829 tree newmod
= fold_build2 (TREE_CODE (arg0
), newtype
,
10830 fold_convert (newtype
,
10831 TREE_OPERAND (arg0
, 0)),
10832 fold_convert (newtype
,
10833 TREE_OPERAND (arg0
, 1)));
10835 return fold_build2 (code
, type
, newmod
,
10836 fold_convert (newtype
, arg1
));
10839 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10840 C1 is a valid shift constant, and C2 is a power of two, i.e.
10842 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10843 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
10844 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
10846 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10847 && integer_zerop (arg1
))
10849 tree itype
= TREE_TYPE (arg0
);
10850 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
10851 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
10853 /* Check for a valid shift count. */
10854 if (TREE_INT_CST_HIGH (arg001
) == 0
10855 && TREE_INT_CST_LOW (arg001
) < prec
)
10857 tree arg01
= TREE_OPERAND (arg0
, 1);
10858 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
10859 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
10860 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10861 can be rewritten as (X & (C2 << C1)) != 0. */
10862 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
10864 tem
= fold_build2 (LSHIFT_EXPR
, itype
, arg01
, arg001
);
10865 tem
= fold_build2 (BIT_AND_EXPR
, itype
, arg000
, tem
);
10866 return fold_build2 (code
, type
, tem
, arg1
);
10868 /* Otherwise, for signed (arithmetic) shifts,
10869 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10870 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10871 else if (!TYPE_UNSIGNED (itype
))
10872 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
10873 arg000
, build_int_cst (itype
, 0));
10874 /* Otherwise, of unsigned (logical) shifts,
10875 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10876 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10878 return omit_one_operand (type
,
10879 code
== EQ_EXPR
? integer_one_node
10880 : integer_zero_node
,
10885 /* If this is an NE comparison of zero with an AND of one, remove the
10886 comparison since the AND will give the correct value. */
10887 if (code
== NE_EXPR
10888 && integer_zerop (arg1
)
10889 && TREE_CODE (arg0
) == BIT_AND_EXPR
10890 && integer_onep (TREE_OPERAND (arg0
, 1)))
10891 return fold_convert (type
, arg0
);
10893 /* If we have (A & C) == C where C is a power of 2, convert this into
10894 (A & C) != 0. Similarly for NE_EXPR. */
10895 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10896 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10897 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10898 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
10899 arg0
, fold_convert (TREE_TYPE (arg0
),
10900 integer_zero_node
));
10902 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10903 bit, then fold the expression into A < 0 or A >= 0. */
10904 tem
= fold_single_bit_test_into_sign_test (code
, arg0
, arg1
, type
);
10908 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10909 Similarly for NE_EXPR. */
10910 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10911 && TREE_CODE (arg1
) == INTEGER_CST
10912 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10914 tree notc
= fold_build1 (BIT_NOT_EXPR
,
10915 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
10916 TREE_OPERAND (arg0
, 1));
10917 tree dandnotc
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
10919 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
10920 if (integer_nonzerop (dandnotc
))
10921 return omit_one_operand (type
, rslt
, arg0
);
10924 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10925 Similarly for NE_EXPR. */
10926 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10927 && TREE_CODE (arg1
) == INTEGER_CST
10928 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10930 tree notd
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
10931 tree candnotd
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
10932 TREE_OPERAND (arg0
, 1), notd
);
10933 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
10934 if (integer_nonzerop (candnotd
))
10935 return omit_one_operand (type
, rslt
, arg0
);
10938 /* If this is a comparison of a field, we may be able to simplify it. */
10939 if ((TREE_CODE (arg0
) == COMPONENT_REF
10940 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
10941 /* Handle the constant case even without -O
10942 to make sure the warnings are given. */
10943 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
10945 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
10950 /* Optimize comparisons of strlen vs zero to a compare of the
10951 first character of the string vs zero. To wit,
10952 strlen(ptr) == 0 => *ptr == 0
10953 strlen(ptr) != 0 => *ptr != 0
10954 Other cases should reduce to one of these two (or a constant)
10955 due to the return value of strlen being unsigned. */
10956 if (TREE_CODE (arg0
) == CALL_EXPR
10957 && integer_zerop (arg1
))
10959 tree fndecl
= get_callee_fndecl (arg0
);
10963 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
10964 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
10965 && (arglist
= TREE_OPERAND (arg0
, 1))
10966 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
10967 && ! TREE_CHAIN (arglist
))
10969 tree iref
= build_fold_indirect_ref (TREE_VALUE (arglist
));
10970 return fold_build2 (code
, type
, iref
,
10971 build_int_cst (TREE_TYPE (iref
), 0));
10975 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10976 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10977 if (TREE_CODE (arg0
) == RSHIFT_EXPR
10978 && integer_zerop (arg1
)
10979 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10981 tree arg00
= TREE_OPERAND (arg0
, 0);
10982 tree arg01
= TREE_OPERAND (arg0
, 1);
10983 tree itype
= TREE_TYPE (arg00
);
10984 if (TREE_INT_CST_HIGH (arg01
) == 0
10985 && TREE_INT_CST_LOW (arg01
)
10986 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
10988 if (TYPE_UNSIGNED (itype
))
10990 itype
= lang_hooks
.types
.signed_type (itype
);
10991 arg00
= fold_convert (itype
, arg00
);
10993 return fold_build2 (code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
10994 type
, arg00
, build_int_cst (itype
, 0));
10998 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10999 if (integer_zerop (arg1
)
11000 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11001 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
11002 TREE_OPERAND (arg0
, 1));
11004 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11005 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11006 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11007 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
11008 build_int_cst (TREE_TYPE (arg1
), 0));
11009 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11010 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11011 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11012 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11013 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 1),
11014 build_int_cst (TREE_TYPE (arg1
), 0));
11016 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11017 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11018 && TREE_CODE (arg1
) == INTEGER_CST
11019 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11020 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
11021 fold_build2 (BIT_XOR_EXPR
, TREE_TYPE (arg1
),
11022 TREE_OPERAND (arg0
, 1), arg1
));
11024 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11025 (X & C) == 0 when C is a single bit. */
11026 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11027 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
11028 && integer_zerop (arg1
)
11029 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11031 tem
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
11032 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
11033 TREE_OPERAND (arg0
, 1));
11034 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
11038 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11039 constant C is a power of two, i.e. a single bit. */
11040 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11041 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11042 && integer_zerop (arg1
)
11043 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11044 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11045 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11047 tree arg00
= TREE_OPERAND (arg0
, 0);
11048 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11049 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
11052 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11053 when is C is a power of two, i.e. a single bit. */
11054 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11055 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
11056 && integer_zerop (arg1
)
11057 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11058 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11059 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11061 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11062 tem
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (arg000
),
11063 arg000
, TREE_OPERAND (arg0
, 1));
11064 return fold_build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11065 tem
, build_int_cst (TREE_TYPE (tem
), 0));
11068 if (integer_zerop (arg1
)
11069 && tree_expr_nonzero_p (arg0
))
11071 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
11072 return omit_one_operand (type
, res
, arg0
);
11075 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11076 if (TREE_CODE (arg0
) == NEGATE_EXPR
11077 && TREE_CODE (arg1
) == NEGATE_EXPR
)
11078 return fold_build2 (code
, type
,
11079 TREE_OPERAND (arg0
, 0),
11080 TREE_OPERAND (arg1
, 0));
11082 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11083 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11084 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11086 tree arg00
= TREE_OPERAND (arg0
, 0);
11087 tree arg01
= TREE_OPERAND (arg0
, 1);
11088 tree arg10
= TREE_OPERAND (arg1
, 0);
11089 tree arg11
= TREE_OPERAND (arg1
, 1);
11090 tree itype
= TREE_TYPE (arg0
);
11092 if (operand_equal_p (arg01
, arg11
, 0))
11093 return fold_build2 (code
, type
,
11094 fold_build2 (BIT_AND_EXPR
, itype
,
11095 fold_build2 (BIT_XOR_EXPR
, itype
,
11098 build_int_cst (itype
, 0));
11100 if (operand_equal_p (arg01
, arg10
, 0))
11101 return fold_build2 (code
, type
,
11102 fold_build2 (BIT_AND_EXPR
, itype
,
11103 fold_build2 (BIT_XOR_EXPR
, itype
,
11106 build_int_cst (itype
, 0));
11108 if (operand_equal_p (arg00
, arg11
, 0))
11109 return fold_build2 (code
, type
,
11110 fold_build2 (BIT_AND_EXPR
, itype
,
11111 fold_build2 (BIT_XOR_EXPR
, itype
,
11114 build_int_cst (itype
, 0));
11116 if (operand_equal_p (arg00
, arg10
, 0))
11117 return fold_build2 (code
, type
,
11118 fold_build2 (BIT_AND_EXPR
, itype
,
11119 fold_build2 (BIT_XOR_EXPR
, itype
,
11122 build_int_cst (itype
, 0));
11125 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11126 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
11128 tree arg00
= TREE_OPERAND (arg0
, 0);
11129 tree arg01
= TREE_OPERAND (arg0
, 1);
11130 tree arg10
= TREE_OPERAND (arg1
, 0);
11131 tree arg11
= TREE_OPERAND (arg1
, 1);
11132 tree itype
= TREE_TYPE (arg0
);
11134 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11135 operand_equal_p guarantees no side-effects so we don't need
11136 to use omit_one_operand on Z. */
11137 if (operand_equal_p (arg01
, arg11
, 0))
11138 return fold_build2 (code
, type
, arg00
, arg10
);
11139 if (operand_equal_p (arg01
, arg10
, 0))
11140 return fold_build2 (code
, type
, arg00
, arg11
);
11141 if (operand_equal_p (arg00
, arg11
, 0))
11142 return fold_build2 (code
, type
, arg01
, arg10
);
11143 if (operand_equal_p (arg00
, arg10
, 0))
11144 return fold_build2 (code
, type
, arg01
, arg11
);
11146 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11147 if (TREE_CODE (arg01
) == INTEGER_CST
11148 && TREE_CODE (arg11
) == INTEGER_CST
)
11149 return fold_build2 (code
, type
,
11150 fold_build2 (BIT_XOR_EXPR
, itype
, arg00
,
11151 fold_build2 (BIT_XOR_EXPR
, itype
,
11161 tem
= fold_comparison (code
, type
, op0
, op1
);
11162 if (tem
!= NULL_TREE
)
11165 /* Transform comparisons of the form X +- C CMP X. */
11166 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11167 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11168 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11169 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
11170 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11171 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
11172 && !(flag_wrapv
|| flag_trapv
))))
11174 tree arg01
= TREE_OPERAND (arg0
, 1);
11175 enum tree_code code0
= TREE_CODE (arg0
);
11178 if (TREE_CODE (arg01
) == REAL_CST
)
11179 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11181 is_positive
= tree_int_cst_sgn (arg01
);
11183 /* (X - c) > X becomes false. */
11184 if (code
== GT_EXPR
11185 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11186 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11187 return constant_boolean_node (0, type
);
11189 /* Likewise (X + c) < X becomes false. */
11190 if (code
== LT_EXPR
11191 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11192 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11193 return constant_boolean_node (0, type
);
11195 /* Convert (X - c) <= X to true. */
11196 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
11198 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11199 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11200 return constant_boolean_node (1, type
);
11202 /* Convert (X + c) >= X to true. */
11203 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
11205 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11206 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11207 return constant_boolean_node (1, type
);
11209 if (TREE_CODE (arg01
) == INTEGER_CST
)
11211 /* Convert X + c > X and X - c < X to true for integers. */
11212 if (code
== GT_EXPR
11213 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11214 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11215 return constant_boolean_node (1, type
);
11217 if (code
== LT_EXPR
11218 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11219 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11220 return constant_boolean_node (1, type
);
11222 /* Convert X + c <= X and X - c >= X to false for integers. */
11223 if (code
== LE_EXPR
11224 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
11225 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
11226 return constant_boolean_node (0, type
);
11228 if (code
== GE_EXPR
11229 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
11230 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
11231 return constant_boolean_node (0, type
);
11235 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11236 This transformation affects the cases which are handled in later
11237 optimizations involving comparisons with non-negative constants. */
11238 if (TREE_CODE (arg1
) == INTEGER_CST
11239 && TREE_CODE (arg0
) != INTEGER_CST
11240 && tree_int_cst_sgn (arg1
) > 0)
11242 if (code
== GE_EXPR
)
11244 arg1
= const_binop (MINUS_EXPR
, arg1
,
11245 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11246 return fold_build2 (GT_EXPR
, type
, arg0
,
11247 fold_convert (TREE_TYPE (arg0
), arg1
));
11249 if (code
== LT_EXPR
)
11251 arg1
= const_binop (MINUS_EXPR
, arg1
,
11252 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11253 return fold_build2 (LE_EXPR
, type
, arg0
,
11254 fold_convert (TREE_TYPE (arg0
), arg1
));
11258 /* Comparisons with the highest or lowest possible integer of
11259 the specified precision will have known values. */
11261 tree arg1_type
= TREE_TYPE (arg1
);
11262 unsigned int width
= TYPE_PRECISION (arg1_type
);
11264 if (TREE_CODE (arg1
) == INTEGER_CST
11265 && !TREE_OVERFLOW (arg1
)
11266 && width
<= 2 * HOST_BITS_PER_WIDE_INT
11267 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
11269 HOST_WIDE_INT signed_max_hi
;
11270 unsigned HOST_WIDE_INT signed_max_lo
;
11271 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
11273 if (width
<= HOST_BITS_PER_WIDE_INT
)
11275 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
11280 if (TYPE_UNSIGNED (arg1_type
))
11282 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
11288 max_lo
= signed_max_lo
;
11289 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
11295 width
-= HOST_BITS_PER_WIDE_INT
;
11296 signed_max_lo
= -1;
11297 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
11302 if (TYPE_UNSIGNED (arg1_type
))
11304 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
11309 max_hi
= signed_max_hi
;
11310 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
11314 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
11315 && TREE_INT_CST_LOW (arg1
) == max_lo
)
11319 return omit_one_operand (type
, integer_zero_node
, arg0
);
11322 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11325 return omit_one_operand (type
, integer_one_node
, arg0
);
11328 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
11330 /* The GE_EXPR and LT_EXPR cases above are not normally
11331 reached because of previous transformations. */
11336 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
11338 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
11342 arg1
= const_binop (PLUS_EXPR
, arg1
,
11343 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11344 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11346 arg1
= const_binop (PLUS_EXPR
, arg1
,
11347 build_int_cst (TREE_TYPE (arg1
), 1), 0);
11348 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
11352 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
11354 && TREE_INT_CST_LOW (arg1
) == min_lo
)
11358 return omit_one_operand (type
, integer_zero_node
, arg0
);
11361 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11364 return omit_one_operand (type
, integer_one_node
, arg0
);
11367 return fold_build2 (NE_EXPR
, type
, op0
, op1
);
11372 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
11374 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
11378 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
11379 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
11381 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
11382 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
11387 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
11388 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
11389 && TYPE_UNSIGNED (arg1_type
)
11390 /* We will flip the signedness of the comparison operator
11391 associated with the mode of arg1, so the sign bit is
11392 specified by this mode. Check that arg1 is the signed
11393 max associated with this sign bit. */
11394 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
11395 /* signed_type does not work on pointer types. */
11396 && INTEGRAL_TYPE_P (arg1_type
))
11398 /* The following case also applies to X < signed_max+1
11399 and X >= signed_max+1 because previous transformations. */
11400 if (code
== LE_EXPR
|| code
== GT_EXPR
)
11403 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
11404 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
11405 return fold_build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
11406 type
, fold_convert (st0
, arg0
),
11407 build_int_cst (st1
, 0));
11413 /* If we are comparing an ABS_EXPR with a constant, we can
11414 convert all the cases into explicit comparisons, but they may
11415 well not be faster than doing the ABS and one comparison.
11416 But ABS (X) <= C is a range comparison, which becomes a subtraction
11417 and a comparison, and is probably faster. */
11418 if (code
== LE_EXPR
11419 && TREE_CODE (arg1
) == INTEGER_CST
11420 && TREE_CODE (arg0
) == ABS_EXPR
11421 && ! TREE_SIDE_EFFECTS (arg0
)
11422 && (0 != (tem
= negate_expr (arg1
)))
11423 && TREE_CODE (tem
) == INTEGER_CST
11424 && !TREE_OVERFLOW (tem
))
11425 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
11426 build2 (GE_EXPR
, type
,
11427 TREE_OPERAND (arg0
, 0), tem
),
11428 build2 (LE_EXPR
, type
,
11429 TREE_OPERAND (arg0
, 0), arg1
));
11431 /* Convert ABS_EXPR<x> >= 0 to true. */
11432 if (code
== GE_EXPR
11433 && tree_expr_nonnegative_p (arg0
)
11434 && (integer_zerop (arg1
)
11435 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11436 && real_zerop (arg1
))))
11437 return omit_one_operand (type
, integer_one_node
, arg0
);
11439 /* Convert ABS_EXPR<x> < 0 to false. */
11440 if (code
== LT_EXPR
11441 && tree_expr_nonnegative_p (arg0
)
11442 && (integer_zerop (arg1
) || real_zerop (arg1
)))
11443 return omit_one_operand (type
, integer_zero_node
, arg0
);
11445 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11446 and similarly for >= into !=. */
11447 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11448 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11449 && TREE_CODE (arg1
) == LSHIFT_EXPR
11450 && integer_onep (TREE_OPERAND (arg1
, 0)))
11451 return build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11452 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11453 TREE_OPERAND (arg1
, 1)),
11454 build_int_cst (TREE_TYPE (arg0
), 0));
11456 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11457 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11458 && (TREE_CODE (arg1
) == NOP_EXPR
11459 || TREE_CODE (arg1
) == CONVERT_EXPR
)
11460 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
11461 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
11463 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11464 fold_convert (TREE_TYPE (arg0
),
11465 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11466 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
11468 build_int_cst (TREE_TYPE (arg0
), 0));
11472 case UNORDERED_EXPR
:
11480 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
11482 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
11483 if (t1
!= NULL_TREE
)
11487 /* If the first operand is NaN, the result is constant. */
11488 if (TREE_CODE (arg0
) == REAL_CST
11489 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
11490 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
11492 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
11493 ? integer_zero_node
11494 : integer_one_node
;
11495 return omit_one_operand (type
, t1
, arg1
);
11498 /* If the second operand is NaN, the result is constant. */
11499 if (TREE_CODE (arg1
) == REAL_CST
11500 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
11501 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
11503 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
11504 ? integer_zero_node
11505 : integer_one_node
;
11506 return omit_one_operand (type
, t1
, arg0
);
11509 /* Simplify unordered comparison of something with itself. */
11510 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
11511 && operand_equal_p (arg0
, arg1
, 0))
11512 return constant_boolean_node (1, type
);
11514 if (code
== LTGT_EXPR
11515 && !flag_trapping_math
11516 && operand_equal_p (arg0
, arg1
, 0))
11517 return constant_boolean_node (0, type
);
11519 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11521 tree targ0
= strip_float_extensions (arg0
);
11522 tree targ1
= strip_float_extensions (arg1
);
11523 tree newtype
= TREE_TYPE (targ0
);
11525 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
11526 newtype
= TREE_TYPE (targ1
);
11528 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
11529 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
11530 fold_convert (newtype
, targ1
));
11535 case COMPOUND_EXPR
:
11536 /* When pedantic, a compound expression can be neither an lvalue
11537 nor an integer constant expression. */
11538 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
11540 /* Don't let (0, 0) be null pointer constant. */
11541 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
11542 : fold_convert (type
, arg1
);
11543 return pedantic_non_lvalue (tem
);
11546 if ((TREE_CODE (arg0
) == REAL_CST
11547 && TREE_CODE (arg1
) == REAL_CST
)
11548 || (TREE_CODE (arg0
) == INTEGER_CST
11549 && TREE_CODE (arg1
) == INTEGER_CST
))
11550 return build_complex (type
, arg0
, arg1
);
11554 /* An ASSERT_EXPR should never be passed to fold_binary. */
11555 gcc_unreachable ();
11559 } /* switch (code) */
11562 /* Callback for walk_tree, looking for LABEL_EXPR.
11563 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11564 Do not check the sub-tree of GOTO_EXPR. */
11567 contains_label_1 (tree
*tp
,
11568 int *walk_subtrees
,
11569 void *data ATTRIBUTE_UNUSED
)
11571 switch (TREE_CODE (*tp
))
11576 *walk_subtrees
= 0;
11583 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11584 accessible from outside the sub-tree. Returns NULL_TREE if no
11585 addressable label is found. */
11588 contains_label_p (tree st
)
11590 return (walk_tree (&st
, contains_label_1
, NULL
, NULL
) != NULL_TREE
);
11593 /* Fold a ternary expression of code CODE and type TYPE with operands
11594 OP0, OP1, and OP2. Return the folded expression if folding is
11595 successful. Otherwise, return NULL_TREE. */
11598 fold_ternary (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
11601 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
11602 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11604 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
11605 && TREE_CODE_LENGTH (code
) == 3);
11607 /* Strip any conversions that don't change the mode. This is safe
11608 for every expression, except for a comparison expression because
11609 its signedness is derived from its operands. So, in the latter
11610 case, only strip conversions that don't change the signedness.
11612 Note that this is done as an internal manipulation within the
11613 constant folder, in order to find the simplest representation of
11614 the arguments so that their form can be studied. In any cases,
11615 the appropriate type conversions should be put back in the tree
11616 that will get out of the constant folder. */
11631 case COMPONENT_REF
:
11632 if (TREE_CODE (arg0
) == CONSTRUCTOR
11633 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11635 unsigned HOST_WIDE_INT idx
;
11637 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11644 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11645 so all simple results must be passed through pedantic_non_lvalue. */
11646 if (TREE_CODE (arg0
) == INTEGER_CST
)
11648 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11649 tem
= integer_zerop (arg0
) ? op2
: op1
;
11650 /* Only optimize constant conditions when the selected branch
11651 has the same type as the COND_EXPR. This avoids optimizing
11652 away "c ? x : throw", where the throw has a void type.
11653 Avoid throwing away that operand which contains label. */
11654 if ((!TREE_SIDE_EFFECTS (unused_op
)
11655 || !contains_label_p (unused_op
))
11656 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11657 || VOID_TYPE_P (type
)))
11658 return pedantic_non_lvalue (tem
);
11661 if (operand_equal_p (arg1
, op2
, 0))
11662 return pedantic_omit_one_operand (type
, arg1
, arg0
);
11664 /* If we have A op B ? A : C, we may be able to convert this to a
11665 simpler expression, depending on the operation and the values
11666 of B and C. Signed zeros prevent all of these transformations,
11667 for reasons given above each one.
11669 Also try swapping the arguments and inverting the conditional. */
11670 if (COMPARISON_CLASS_P (arg0
)
11671 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11672 arg1
, TREE_OPERAND (arg0
, 1))
11673 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
11675 tem
= fold_cond_expr_with_comparison (type
, arg0
, op1
, op2
);
11680 if (COMPARISON_CLASS_P (arg0
)
11681 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11683 TREE_OPERAND (arg0
, 1))
11684 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
11686 tem
= fold_truth_not_expr (arg0
);
11687 if (tem
&& COMPARISON_CLASS_P (tem
))
11689 tem
= fold_cond_expr_with_comparison (type
, tem
, op2
, op1
);
11695 /* If the second operand is simpler than the third, swap them
11696 since that produces better jump optimization results. */
11697 if (truth_value_p (TREE_CODE (arg0
))
11698 && tree_swap_operands_p (op1
, op2
, false))
11700 /* See if this can be inverted. If it can't, possibly because
11701 it was a floating-point inequality comparison, don't do
11703 tem
= fold_truth_not_expr (arg0
);
11705 return fold_build3 (code
, type
, tem
, op2
, op1
);
11708 /* Convert A ? 1 : 0 to simply A. */
11709 if (integer_onep (op1
)
11710 && integer_zerop (op2
)
11711 /* If we try to convert OP0 to our type, the
11712 call to fold will try to move the conversion inside
11713 a COND, which will recurse. In that case, the COND_EXPR
11714 is probably the best choice, so leave it alone. */
11715 && type
== TREE_TYPE (arg0
))
11716 return pedantic_non_lvalue (arg0
);
11718 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11719 over COND_EXPR in cases such as floating point comparisons. */
11720 if (integer_zerop (op1
)
11721 && integer_onep (op2
)
11722 && truth_value_p (TREE_CODE (arg0
)))
11723 return pedantic_non_lvalue (fold_convert (type
,
11724 invert_truthvalue (arg0
)));
11726 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11727 if (TREE_CODE (arg0
) == LT_EXPR
11728 && integer_zerop (TREE_OPERAND (arg0
, 1))
11729 && integer_zerop (op2
)
11730 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11732 /* sign_bit_p only checks ARG1 bits within A's precision.
11733 If <sign bit of A> has wider type than A, bits outside
11734 of A's precision in <sign bit of A> need to be checked.
11735 If they are all 0, this optimization needs to be done
11736 in unsigned A's type, if they are all 1 in signed A's type,
11737 otherwise this can't be done. */
11738 if (TYPE_PRECISION (TREE_TYPE (tem
))
11739 < TYPE_PRECISION (TREE_TYPE (arg1
))
11740 && TYPE_PRECISION (TREE_TYPE (tem
))
11741 < TYPE_PRECISION (type
))
11743 unsigned HOST_WIDE_INT mask_lo
;
11744 HOST_WIDE_INT mask_hi
;
11745 int inner_width
, outer_width
;
11748 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
11749 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
11750 if (outer_width
> TYPE_PRECISION (type
))
11751 outer_width
= TYPE_PRECISION (type
);
11753 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
11755 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
11756 >> (2 * HOST_BITS_PER_WIDE_INT
- outer_width
));
11762 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
11763 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
11765 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
11767 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
11768 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
11772 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
11773 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
11775 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
11776 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
11778 tem_type
= lang_hooks
.types
.signed_type (TREE_TYPE (tem
));
11779 tem
= fold_convert (tem_type
, tem
);
11781 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
11782 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
11784 tem_type
= lang_hooks
.types
.unsigned_type (TREE_TYPE (tem
));
11785 tem
= fold_convert (tem_type
, tem
);
11792 return fold_convert (type
,
11793 fold_build2 (BIT_AND_EXPR
,
11794 TREE_TYPE (tem
), tem
,
11795 fold_convert (TREE_TYPE (tem
),
11799 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11800 already handled above. */
11801 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11802 && integer_onep (TREE_OPERAND (arg0
, 1))
11803 && integer_zerop (op2
)
11804 && integer_pow2p (arg1
))
11806 tree tem
= TREE_OPERAND (arg0
, 0);
11808 if (TREE_CODE (tem
) == RSHIFT_EXPR
11809 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11810 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
11811 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
11812 return fold_build2 (BIT_AND_EXPR
, type
,
11813 TREE_OPERAND (tem
, 0), arg1
);
11816 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11817 is probably obsolete because the first operand should be a
11818 truth value (that's why we have the two cases above), but let's
11819 leave it in until we can confirm this for all front-ends. */
11820 if (integer_zerop (op2
)
11821 && TREE_CODE (arg0
) == NE_EXPR
11822 && integer_zerop (TREE_OPERAND (arg0
, 1))
11823 && integer_pow2p (arg1
)
11824 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11825 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11826 arg1
, OEP_ONLY_CONST
))
11827 return pedantic_non_lvalue (fold_convert (type
,
11828 TREE_OPERAND (arg0
, 0)));
11830 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11831 if (integer_zerop (op2
)
11832 && truth_value_p (TREE_CODE (arg0
))
11833 && truth_value_p (TREE_CODE (arg1
)))
11834 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
11835 fold_convert (type
, arg0
),
11838 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11839 if (integer_onep (op2
)
11840 && truth_value_p (TREE_CODE (arg0
))
11841 && truth_value_p (TREE_CODE (arg1
)))
11843 /* Only perform transformation if ARG0 is easily inverted. */
11844 tem
= fold_truth_not_expr (arg0
);
11846 return fold_build2 (TRUTH_ORIF_EXPR
, type
,
11847 fold_convert (type
, tem
),
11851 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11852 if (integer_zerop (arg1
)
11853 && truth_value_p (TREE_CODE (arg0
))
11854 && truth_value_p (TREE_CODE (op2
)))
11856 /* Only perform transformation if ARG0 is easily inverted. */
11857 tem
= fold_truth_not_expr (arg0
);
11859 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
11860 fold_convert (type
, tem
),
11864 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11865 if (integer_onep (arg1
)
11866 && truth_value_p (TREE_CODE (arg0
))
11867 && truth_value_p (TREE_CODE (op2
)))
11868 return fold_build2 (TRUTH_ORIF_EXPR
, type
,
11869 fold_convert (type
, arg0
),
11875 /* Check for a built-in function. */
11876 if (TREE_CODE (op0
) == ADDR_EXPR
11877 && TREE_CODE (TREE_OPERAND (op0
, 0)) == FUNCTION_DECL
11878 && DECL_BUILT_IN (TREE_OPERAND (op0
, 0)))
11879 return fold_builtin (TREE_OPERAND (op0
, 0), op1
, false);
11882 case BIT_FIELD_REF
:
11883 if (TREE_CODE (arg0
) == VECTOR_CST
11884 && type
== TREE_TYPE (TREE_TYPE (arg0
))
11885 && host_integerp (arg1
, 1)
11886 && host_integerp (op2
, 1))
11888 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
11889 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
11892 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
11893 && (idx
% width
) == 0
11894 && (idx
= idx
/ width
)
11895 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
11897 tree elements
= TREE_VECTOR_CST_ELTS (arg0
);
11898 while (idx
-- > 0 && elements
)
11899 elements
= TREE_CHAIN (elements
);
11901 return TREE_VALUE (elements
);
11903 return fold_convert (type
, integer_zero_node
);
11910 } /* switch (code) */
11913 /* Perform constant folding and related simplification of EXPR.
11914 The related simplifications include x*1 => x, x*0 => 0, etc.,
11915 and application of the associative law.
11916 NOP_EXPR conversions may be removed freely (as long as we
11917 are careful not to change the type of the overall expression).
11918 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11919 but we can constant-fold them if they have constant operands. */
11921 #ifdef ENABLE_FOLD_CHECKING
11922 # define fold(x) fold_1 (x)
11923 static tree
fold_1 (tree
);
11929 const tree t
= expr
;
11930 enum tree_code code
= TREE_CODE (t
);
11931 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11934 /* Return right away if a constant. */
11935 if (kind
== tcc_constant
)
11938 if (IS_EXPR_CODE_CLASS (kind
)
11939 || IS_GIMPLE_STMT_CODE_CLASS (kind
))
11941 tree type
= TREE_TYPE (t
);
11942 tree op0
, op1
, op2
;
11944 switch (TREE_CODE_LENGTH (code
))
11947 op0
= TREE_OPERAND (t
, 0);
11948 tem
= fold_unary (code
, type
, op0
);
11949 return tem
? tem
: expr
;
11951 op0
= TREE_OPERAND (t
, 0);
11952 op1
= TREE_OPERAND (t
, 1);
11953 tem
= fold_binary (code
, type
, op0
, op1
);
11954 return tem
? tem
: expr
;
11956 op0
= TREE_OPERAND (t
, 0);
11957 op1
= TREE_OPERAND (t
, 1);
11958 op2
= TREE_OPERAND (t
, 2);
11959 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
11960 return tem
? tem
: expr
;
11969 return fold (DECL_INITIAL (t
));
11973 } /* switch (code) */
11976 #ifdef ENABLE_FOLD_CHECKING
11979 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
11980 static void fold_check_failed (tree
, tree
);
11981 void print_fold_checksum (tree
);
11983 /* When --enable-checking=fold, compute a digest of expr before
11984 and after actual fold call to see if fold did not accidentally
11985 change original expr. */
11991 struct md5_ctx ctx
;
11992 unsigned char checksum_before
[16], checksum_after
[16];
11995 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
11996 md5_init_ctx (&ctx
);
11997 fold_checksum_tree (expr
, &ctx
, ht
);
11998 md5_finish_ctx (&ctx
, checksum_before
);
12001 ret
= fold_1 (expr
);
12003 md5_init_ctx (&ctx
);
12004 fold_checksum_tree (expr
, &ctx
, ht
);
12005 md5_finish_ctx (&ctx
, checksum_after
);
12008 if (memcmp (checksum_before
, checksum_after
, 16))
12009 fold_check_failed (expr
, ret
);
12015 print_fold_checksum (tree expr
)
12017 struct md5_ctx ctx
;
12018 unsigned char checksum
[16], cnt
;
12021 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12022 md5_init_ctx (&ctx
);
12023 fold_checksum_tree (expr
, &ctx
, ht
);
12024 md5_finish_ctx (&ctx
, checksum
);
12026 for (cnt
= 0; cnt
< 16; ++cnt
)
12027 fprintf (stderr
, "%02x", checksum
[cnt
]);
12028 putc ('\n', stderr
);
12032 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
12034 internal_error ("fold check: original tree changed by fold");
12038 fold_checksum_tree (tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
12041 enum tree_code code
;
12042 struct tree_function_decl buf
;
12047 gcc_assert ((sizeof (struct tree_exp
) + 5 * sizeof (tree
)
12048 <= sizeof (struct tree_function_decl
))
12049 && sizeof (struct tree_type
) <= sizeof (struct tree_function_decl
));
12052 slot
= htab_find_slot (ht
, expr
, INSERT
);
12056 code
= TREE_CODE (expr
);
12057 if (TREE_CODE_CLASS (code
) == tcc_declaration
12058 && DECL_ASSEMBLER_NAME_SET_P (expr
))
12060 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12061 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12062 expr
= (tree
) &buf
;
12063 SET_DECL_ASSEMBLER_NAME (expr
, NULL
);
12065 else if (TREE_CODE_CLASS (code
) == tcc_type
12066 && (TYPE_POINTER_TO (expr
) || TYPE_REFERENCE_TO (expr
)
12067 || TYPE_CACHED_VALUES_P (expr
)
12068 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)))
12070 /* Allow these fields to be modified. */
12071 memcpy ((char *) &buf
, expr
, tree_size (expr
));
12072 expr
= (tree
) &buf
;
12073 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
) = 0;
12074 TYPE_POINTER_TO (expr
) = NULL
;
12075 TYPE_REFERENCE_TO (expr
) = NULL
;
12076 if (TYPE_CACHED_VALUES_P (expr
))
12078 TYPE_CACHED_VALUES_P (expr
) = 0;
12079 TYPE_CACHED_VALUES (expr
) = NULL
;
12082 md5_process_bytes (expr
, tree_size (expr
), ctx
);
12083 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
12084 if (TREE_CODE_CLASS (code
) != tcc_type
12085 && TREE_CODE_CLASS (code
) != tcc_declaration
12086 && code
!= TREE_LIST
)
12087 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
12088 switch (TREE_CODE_CLASS (code
))
12094 md5_process_bytes (TREE_STRING_POINTER (expr
),
12095 TREE_STRING_LENGTH (expr
), ctx
);
12098 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
12099 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
12102 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
12108 case tcc_exceptional
:
12112 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
12113 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
12114 expr
= TREE_CHAIN (expr
);
12115 goto recursive_label
;
12118 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
12119 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
12125 case tcc_expression
:
12126 case tcc_reference
:
12127 case tcc_comparison
:
12130 case tcc_statement
:
12131 len
= TREE_CODE_LENGTH (code
);
12132 for (i
= 0; i
< len
; ++i
)
12133 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
12135 case tcc_declaration
:
12136 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
12137 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
12138 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
12140 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
12141 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
12142 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
12143 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
12144 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
12146 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
12147 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
12149 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
12151 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
12152 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
12153 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
12157 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
12158 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
12159 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
12160 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
12161 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
12162 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
12163 if (INTEGRAL_TYPE_P (expr
)
12164 || SCALAR_FLOAT_TYPE_P (expr
))
12166 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
12167 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
12169 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
12170 if (TREE_CODE (expr
) == RECORD_TYPE
12171 || TREE_CODE (expr
) == UNION_TYPE
12172 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
12173 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
12174 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
12183 /* Fold a unary tree expression with code CODE of type TYPE with an
12184 operand OP0. Return a folded expression if successful. Otherwise,
12185 return a tree expression with code CODE of type TYPE with an
12189 fold_build1_stat (enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
12192 #ifdef ENABLE_FOLD_CHECKING
12193 unsigned char checksum_before
[16], checksum_after
[16];
12194 struct md5_ctx ctx
;
12197 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12198 md5_init_ctx (&ctx
);
12199 fold_checksum_tree (op0
, &ctx
, ht
);
12200 md5_finish_ctx (&ctx
, checksum_before
);
12204 tem
= fold_unary (code
, type
, op0
);
12206 tem
= build1_stat (code
, type
, op0 PASS_MEM_STAT
);
12208 #ifdef ENABLE_FOLD_CHECKING
12209 md5_init_ctx (&ctx
);
12210 fold_checksum_tree (op0
, &ctx
, ht
);
12211 md5_finish_ctx (&ctx
, checksum_after
);
12214 if (memcmp (checksum_before
, checksum_after
, 16))
12215 fold_check_failed (op0
, tem
);
12220 /* Fold a binary tree expression with code CODE of type TYPE with
12221 operands OP0 and OP1. Return a folded expression if successful.
12222 Otherwise, return a tree expression with code CODE of type TYPE
12223 with operands OP0 and OP1. */
12226 fold_build2_stat (enum tree_code code
, tree type
, tree op0
, tree op1
12230 #ifdef ENABLE_FOLD_CHECKING
12231 unsigned char checksum_before_op0
[16],
12232 checksum_before_op1
[16],
12233 checksum_after_op0
[16],
12234 checksum_after_op1
[16];
12235 struct md5_ctx ctx
;
12238 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12239 md5_init_ctx (&ctx
);
12240 fold_checksum_tree (op0
, &ctx
, ht
);
12241 md5_finish_ctx (&ctx
, checksum_before_op0
);
12244 md5_init_ctx (&ctx
);
12245 fold_checksum_tree (op1
, &ctx
, ht
);
12246 md5_finish_ctx (&ctx
, checksum_before_op1
);
12250 tem
= fold_binary (code
, type
, op0
, op1
);
12252 tem
= build2_stat (code
, type
, op0
, op1 PASS_MEM_STAT
);
12254 #ifdef ENABLE_FOLD_CHECKING
12255 md5_init_ctx (&ctx
);
12256 fold_checksum_tree (op0
, &ctx
, ht
);
12257 md5_finish_ctx (&ctx
, checksum_after_op0
);
12260 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12261 fold_check_failed (op0
, tem
);
12263 md5_init_ctx (&ctx
);
12264 fold_checksum_tree (op1
, &ctx
, ht
);
12265 md5_finish_ctx (&ctx
, checksum_after_op1
);
12268 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12269 fold_check_failed (op1
, tem
);
12274 /* Fold a ternary tree expression with code CODE of type TYPE with
12275 operands OP0, OP1, and OP2. Return a folded expression if
12276 successful. Otherwise, return a tree expression with code CODE of
12277 type TYPE with operands OP0, OP1, and OP2. */
12280 fold_build3_stat (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
12284 #ifdef ENABLE_FOLD_CHECKING
12285 unsigned char checksum_before_op0
[16],
12286 checksum_before_op1
[16],
12287 checksum_before_op2
[16],
12288 checksum_after_op0
[16],
12289 checksum_after_op1
[16],
12290 checksum_after_op2
[16];
12291 struct md5_ctx ctx
;
12294 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
12295 md5_init_ctx (&ctx
);
12296 fold_checksum_tree (op0
, &ctx
, ht
);
12297 md5_finish_ctx (&ctx
, checksum_before_op0
);
12300 md5_init_ctx (&ctx
);
12301 fold_checksum_tree (op1
, &ctx
, ht
);
12302 md5_finish_ctx (&ctx
, checksum_before_op1
);
12305 md5_init_ctx (&ctx
);
12306 fold_checksum_tree (op2
, &ctx
, ht
);
12307 md5_finish_ctx (&ctx
, checksum_before_op2
);
12311 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
12313 tem
= build3_stat (code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
12315 #ifdef ENABLE_FOLD_CHECKING
12316 md5_init_ctx (&ctx
);
12317 fold_checksum_tree (op0
, &ctx
, ht
);
12318 md5_finish_ctx (&ctx
, checksum_after_op0
);
12321 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
12322 fold_check_failed (op0
, tem
);
12324 md5_init_ctx (&ctx
);
12325 fold_checksum_tree (op1
, &ctx
, ht
);
12326 md5_finish_ctx (&ctx
, checksum_after_op1
);
12329 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
12330 fold_check_failed (op1
, tem
);
12332 md5_init_ctx (&ctx
);
12333 fold_checksum_tree (op2
, &ctx
, ht
);
12334 md5_finish_ctx (&ctx
, checksum_after_op2
);
12337 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
12338 fold_check_failed (op2
, tem
);
12343 /* Perform constant folding and related simplification of initializer
12344 expression EXPR. These behave identically to "fold_buildN" but ignore
12345 potential run-time traps and exceptions that fold must preserve. */
12347 #define START_FOLD_INIT \
12348 int saved_signaling_nans = flag_signaling_nans;\
12349 int saved_trapping_math = flag_trapping_math;\
12350 int saved_rounding_math = flag_rounding_math;\
12351 int saved_trapv = flag_trapv;\
12352 int saved_folding_initializer = folding_initializer;\
12353 flag_signaling_nans = 0;\
12354 flag_trapping_math = 0;\
12355 flag_rounding_math = 0;\
12357 folding_initializer = 1;
12359 #define END_FOLD_INIT \
12360 flag_signaling_nans = saved_signaling_nans;\
12361 flag_trapping_math = saved_trapping_math;\
12362 flag_rounding_math = saved_rounding_math;\
12363 flag_trapv = saved_trapv;\
12364 folding_initializer = saved_folding_initializer;
12367 fold_build1_initializer (enum tree_code code
, tree type
, tree op
)
12372 result
= fold_build1 (code
, type
, op
);
12379 fold_build2_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
)
12384 result
= fold_build2 (code
, type
, op0
, op1
);
12391 fold_build3_initializer (enum tree_code code
, tree type
, tree op0
, tree op1
,
12397 result
= fold_build3 (code
, type
, op0
, op1
, op2
);
12403 #undef START_FOLD_INIT
12404 #undef END_FOLD_INIT
12406 /* Determine if first argument is a multiple of second argument. Return 0 if
12407 it is not, or we cannot easily determined it to be.
12409 An example of the sort of thing we care about (at this point; this routine
12410 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12411 fold cases do now) is discovering that
12413 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12419 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12421 This code also handles discovering that
12423 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12425 is a multiple of 8 so we don't have to worry about dealing with a
12426 possible remainder.
12428 Note that we *look* inside a SAVE_EXPR only to determine how it was
12429 calculated; it is not safe for fold to do much of anything else with the
12430 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12431 at run time. For example, the latter example above *cannot* be implemented
12432 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12433 evaluation time of the original SAVE_EXPR is not necessarily the same at
12434 the time the new expression is evaluated. The only optimization of this
12435 sort that would be valid is changing
12437 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12441 SAVE_EXPR (I) * SAVE_EXPR (J)
12443 (where the same SAVE_EXPR (J) is used in the original and the
12444 transformed version). */
12447 multiple_of_p (tree type
, tree top
, tree bottom
)
12449 if (operand_equal_p (top
, bottom
, 0))
12452 if (TREE_CODE (type
) != INTEGER_TYPE
)
12455 switch (TREE_CODE (top
))
12458 /* Bitwise and provides a power of two multiple. If the mask is
12459 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12460 if (!integer_pow2p (bottom
))
12465 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12466 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12470 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
12471 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
12474 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
12478 op1
= TREE_OPERAND (top
, 1);
12479 /* const_binop may not detect overflow correctly,
12480 so check for it explicitly here. */
12481 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
12482 > TREE_INT_CST_LOW (op1
)
12483 && TREE_INT_CST_HIGH (op1
) == 0
12484 && 0 != (t1
= fold_convert (type
,
12485 const_binop (LSHIFT_EXPR
,
12488 && !TREE_OVERFLOW (t1
))
12489 return multiple_of_p (type
, t1
, bottom
);
12494 /* Can't handle conversions from non-integral or wider integral type. */
12495 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
12496 || (TYPE_PRECISION (type
)
12497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
12500 /* .. fall through ... */
12503 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
12506 if (TREE_CODE (bottom
) != INTEGER_CST
12507 || (TYPE_UNSIGNED (type
)
12508 && (tree_int_cst_sgn (top
) < 0
12509 || tree_int_cst_sgn (bottom
) < 0)))
12511 return integer_zerop (const_binop (TRUNC_MOD_EXPR
,
12519 /* Return true if `t' is known to be non-negative. */
12522 tree_expr_nonnegative_p (tree t
)
12524 if (t
== error_mark_node
)
12527 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
12530 switch (TREE_CODE (t
))
12533 /* Query VRP to see if it has recorded any information about
12534 the range of this object. */
12535 return ssa_name_nonnegative_p (t
);
12538 /* We can't return 1 if flag_wrapv is set because
12539 ABS_EXPR<INT_MIN> = INT_MIN. */
12540 if (!(flag_wrapv
&& INTEGRAL_TYPE_P (TREE_TYPE (t
))))
12545 return tree_int_cst_sgn (t
) >= 0;
12548 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
12551 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
12552 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12553 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12555 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12556 both unsigned and at least 2 bits shorter than the result. */
12557 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
12558 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
12559 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
12561 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
12562 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
12563 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12564 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12566 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
12567 TYPE_PRECISION (inner2
)) + 1;
12568 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
12574 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
12576 /* x * x for floating point x is always non-negative. */
12577 if (operand_equal_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1), 0))
12579 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12580 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12583 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12584 both unsigned and their total bits is shorter than the result. */
12585 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
12586 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
12587 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
12589 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
12590 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
12591 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
12592 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
12593 return TYPE_PRECISION (inner1
) + TYPE_PRECISION (inner2
)
12594 < TYPE_PRECISION (TREE_TYPE (t
));
12600 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12601 || tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12607 case TRUNC_DIV_EXPR
:
12608 case CEIL_DIV_EXPR
:
12609 case FLOOR_DIV_EXPR
:
12610 case ROUND_DIV_EXPR
:
12611 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12612 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
12614 case TRUNC_MOD_EXPR
:
12615 case CEIL_MOD_EXPR
:
12616 case FLOOR_MOD_EXPR
:
12617 case ROUND_MOD_EXPR
:
12619 case NON_LVALUE_EXPR
:
12621 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12623 case COMPOUND_EXPR
:
12625 case GIMPLE_MODIFY_STMT
:
12626 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t
, 1));
12629 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t
, 1)));
12632 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1))
12633 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 2));
12637 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
12638 tree outer_type
= TREE_TYPE (t
);
12640 if (TREE_CODE (outer_type
) == REAL_TYPE
)
12642 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12643 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12644 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
12646 if (TYPE_UNSIGNED (inner_type
))
12648 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12651 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
12653 if (TREE_CODE (inner_type
) == REAL_TYPE
)
12654 return tree_expr_nonnegative_p (TREE_OPERAND (t
,0));
12655 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
12656 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
12657 && TYPE_UNSIGNED (inner_type
);
12664 tree temp
= TARGET_EXPR_SLOT (t
);
12665 t
= TARGET_EXPR_INITIAL (t
);
12667 /* If the initializer is non-void, then it's a normal expression
12668 that will be assigned to the slot. */
12669 if (!VOID_TYPE_P (t
))
12670 return tree_expr_nonnegative_p (t
);
12672 /* Otherwise, the initializer sets the slot in some way. One common
12673 way is an assignment statement at the end of the initializer. */
12676 if (TREE_CODE (t
) == BIND_EXPR
)
12677 t
= expr_last (BIND_EXPR_BODY (t
));
12678 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
12679 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
12680 t
= expr_last (TREE_OPERAND (t
, 0));
12681 else if (TREE_CODE (t
) == STATEMENT_LIST
)
12686 if ((TREE_CODE (t
) == MODIFY_EXPR
12687 || TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
12688 && GENERIC_TREE_OPERAND (t
, 0) == temp
)
12689 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t
, 1));
12696 tree fndecl
= get_callee_fndecl (t
);
12697 tree arglist
= TREE_OPERAND (t
, 1);
12698 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
12699 switch (DECL_FUNCTION_CODE (fndecl
))
12701 CASE_FLT_FN (BUILT_IN_ACOS
):
12702 CASE_FLT_FN (BUILT_IN_ACOSH
):
12703 CASE_FLT_FN (BUILT_IN_CABS
):
12704 CASE_FLT_FN (BUILT_IN_COSH
):
12705 CASE_FLT_FN (BUILT_IN_ERFC
):
12706 CASE_FLT_FN (BUILT_IN_EXP
):
12707 CASE_FLT_FN (BUILT_IN_EXP10
):
12708 CASE_FLT_FN (BUILT_IN_EXP2
):
12709 CASE_FLT_FN (BUILT_IN_FABS
):
12710 CASE_FLT_FN (BUILT_IN_FDIM
):
12711 CASE_FLT_FN (BUILT_IN_HYPOT
):
12712 CASE_FLT_FN (BUILT_IN_POW10
):
12713 CASE_INT_FN (BUILT_IN_FFS
):
12714 CASE_INT_FN (BUILT_IN_PARITY
):
12715 CASE_INT_FN (BUILT_IN_POPCOUNT
):
12716 case BUILT_IN_BSWAP32
:
12717 case BUILT_IN_BSWAP64
:
12721 CASE_FLT_FN (BUILT_IN_SQRT
):
12722 /* sqrt(-0.0) is -0.0. */
12723 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t
))))
12725 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12727 CASE_FLT_FN (BUILT_IN_ASINH
):
12728 CASE_FLT_FN (BUILT_IN_ATAN
):
12729 CASE_FLT_FN (BUILT_IN_ATANH
):
12730 CASE_FLT_FN (BUILT_IN_CBRT
):
12731 CASE_FLT_FN (BUILT_IN_CEIL
):
12732 CASE_FLT_FN (BUILT_IN_ERF
):
12733 CASE_FLT_FN (BUILT_IN_EXPM1
):
12734 CASE_FLT_FN (BUILT_IN_FLOOR
):
12735 CASE_FLT_FN (BUILT_IN_FMOD
):
12736 CASE_FLT_FN (BUILT_IN_FREXP
):
12737 CASE_FLT_FN (BUILT_IN_LCEIL
):
12738 CASE_FLT_FN (BUILT_IN_LDEXP
):
12739 CASE_FLT_FN (BUILT_IN_LFLOOR
):
12740 CASE_FLT_FN (BUILT_IN_LLCEIL
):
12741 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
12742 CASE_FLT_FN (BUILT_IN_LLRINT
):
12743 CASE_FLT_FN (BUILT_IN_LLROUND
):
12744 CASE_FLT_FN (BUILT_IN_LRINT
):
12745 CASE_FLT_FN (BUILT_IN_LROUND
):
12746 CASE_FLT_FN (BUILT_IN_MODF
):
12747 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
12748 CASE_FLT_FN (BUILT_IN_RINT
):
12749 CASE_FLT_FN (BUILT_IN_ROUND
):
12750 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
12751 CASE_FLT_FN (BUILT_IN_SINH
):
12752 CASE_FLT_FN (BUILT_IN_TANH
):
12753 CASE_FLT_FN (BUILT_IN_TRUNC
):
12754 /* True if the 1st argument is nonnegative. */
12755 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12757 CASE_FLT_FN (BUILT_IN_FMAX
):
12758 /* True if the 1st OR 2nd arguments are nonnegative. */
12759 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
12760 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
12762 CASE_FLT_FN (BUILT_IN_FMIN
):
12763 /* True if the 1st AND 2nd arguments are nonnegative. */
12764 return tree_expr_nonnegative_p (TREE_VALUE (arglist
))
12765 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
12767 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
12768 /* True if the 2nd argument is nonnegative. */
12769 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist
)));
12771 CASE_FLT_FN (BUILT_IN_POWI
):
12772 /* True if the 1st argument is nonnegative or the second
12773 argument is an even integer. */
12774 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist
))) == INTEGER_CST
)
12776 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
12777 if ((TREE_INT_CST_LOW (arg1
) & 1) == 0)
12780 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12782 CASE_FLT_FN (BUILT_IN_POW
):
12783 /* True if the 1st argument is nonnegative or the second
12784 argument is an even integer valued real. */
12785 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist
))) == REAL_CST
)
12790 c
= TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist
)));
12791 n
= real_to_integer (&c
);
12794 REAL_VALUE_TYPE cint
;
12795 real_from_integer (&cint
, VOIDmode
, n
,
12796 n
< 0 ? -1 : 0, 0);
12797 if (real_identical (&c
, &cint
))
12801 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
12808 /* ... fall through ... */
12811 if (truth_value_p (TREE_CODE (t
)))
12812 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12816 /* We don't know sign of `t', so be conservative and return false. */
12820 /* Return true when T is an address and is known to be nonzero.
12821 For floating point we further ensure that T is not denormal.
12822 Similar logic is present in nonzero_address in rtlanal.h. */
12825 tree_expr_nonzero_p (tree t
)
12827 tree type
= TREE_TYPE (t
);
12829 /* Doing something useful for floating point would need more work. */
12830 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
12833 switch (TREE_CODE (t
))
12836 /* Query VRP to see if it has recorded any information about
12837 the range of this object. */
12838 return ssa_name_nonzero_p (t
);
12841 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
12844 return !integer_zerop (t
);
12847 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
12849 /* With the presence of negative values it is hard
12850 to say something. */
12851 if (!tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
12852 || !tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
12854 /* One of operands must be positive and the other non-negative. */
12855 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
12856 || tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
12861 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
)
12863 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
12864 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
12870 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
12871 tree outer_type
= TREE_TYPE (t
);
12873 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
12874 && tree_expr_nonzero_p (TREE_OPERAND (t
, 0)));
12880 tree base
= get_base_address (TREE_OPERAND (t
, 0));
12885 /* Weak declarations may link to NULL. */
12886 if (VAR_OR_FUNCTION_DECL_P (base
))
12887 return !DECL_WEAK (base
);
12889 /* Constants are never weak. */
12890 if (CONSTANT_CLASS_P (base
))
12897 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
12898 && tree_expr_nonzero_p (TREE_OPERAND (t
, 2)));
12901 return (tree_expr_nonzero_p (TREE_OPERAND (t
, 0))
12902 && tree_expr_nonzero_p (TREE_OPERAND (t
, 1)));
12905 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 0)))
12907 /* When both operands are nonzero, then MAX must be too. */
12908 if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1)))
12911 /* MAX where operand 0 is positive is positive. */
12912 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
12914 /* MAX where operand 1 is positive is positive. */
12915 else if (tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
12916 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1)))
12920 case COMPOUND_EXPR
:
12922 case GIMPLE_MODIFY_STMT
:
12924 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t
, 1));
12927 case NON_LVALUE_EXPR
:
12928 return tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
12931 return tree_expr_nonzero_p (TREE_OPERAND (t
, 1))
12932 || tree_expr_nonzero_p (TREE_OPERAND (t
, 0));
12935 return alloca_call_p (t
);
12943 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12944 attempt to fold the expression to a constant without modifying TYPE,
12947 If the expression could be simplified to a constant, then return
12948 the constant. If the expression would not be simplified to a
12949 constant, then return NULL_TREE. */
12952 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
12954 tree tem
= fold_binary (code
, type
, op0
, op1
);
12955 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
12958 /* Given the components of a unary expression CODE, TYPE and OP0,
12959 attempt to fold the expression to a constant without modifying
12962 If the expression could be simplified to a constant, then return
12963 the constant. If the expression would not be simplified to a
12964 constant, then return NULL_TREE. */
12967 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
12969 tree tem
= fold_unary (code
, type
, op0
);
12970 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
12973 /* If EXP represents referencing an element in a constant string
12974 (either via pointer arithmetic or array indexing), return the
12975 tree representing the value accessed, otherwise return NULL. */
12978 fold_read_from_constant_string (tree exp
)
/* Only an INDIRECT_REF or ARRAY_REF with integer element type can be
   a read from a string constant.  */
12980 if ((TREE_CODE (exp
) == INDIRECT_REF
12981 || TREE_CODE (exp
) == ARRAY_REF
)
12982 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
12984 tree exp1
= TREE_OPERAND (exp
, 0);
/* For *p, split the address into a STRING_CST base plus a constant
   byte index.  */
12988 if (TREE_CODE (exp
) == INDIRECT_REF
)
12989 string
= string_constant (exp1
, &index
);
/* For s[i], normalize the index against the array's lower bound.  */
12992 tree low_bound
= array_ref_low_bound (exp
);
12993 index
= fold_convert (sizetype
, TREE_OPERAND (exp
, 1));
12995 /* Optimize the special-case of a zero lower bound.
12997 We convert the low_bound to sizetype to avoid some problems
12998 with constant folding. (E.g. suppose the lower bound is 1,
12999 and its mode is QI. Without the conversion, (ARRAY
13000 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13001 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13002 if (! integer_zerop (low_bound
))
13003 index
= size_diffop (index
, fold_convert (sizetype
, low_bound
));
/* Fold only when the index is a constant that lies inside the string
   and the element is a single-byte integer mode, so a simple byte
   read from TREE_STRING_POINTER is valid.  */
13009 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
13010 && TREE_CODE (string
) == STRING_CST
13011 && TREE_CODE (index
) == INTEGER_CST
13012 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
13013 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
13015 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
13016 return fold_convert (TREE_TYPE (exp
),
13017 build_int_cst (NULL_TREE
,
13018 (TREE_STRING_POINTER (string
)
13019 [TREE_INT_CST_LOW (index
)])));
13024 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13025 an integer constant or real constant.
13027 TYPE is the type of the result. */
13030 fold_negate_const (tree arg0
, tree type
)
13032 tree t
= NULL_TREE
;
13034 switch (TREE_CODE (arg0
))
/* INTEGER_CST: negate the double-word value with neg_double; the
   overflow bit (or a pre-existing overflow on ARG0) is recorded on
   the result only for signed types.  */
13038 unsigned HOST_WIDE_INT low
;
13039 HOST_WIDE_INT high
;
13040 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
13041 TREE_INT_CST_HIGH (arg0
),
13043 t
= force_fit_type_double (type
, low
, high
, 1,
13044 (overflow
| TREE_OVERFLOW (arg0
))
13045 && !TYPE_UNSIGNED (type
));
/* REAL_CST: floating-point negation is exact, no overflow check.  */
13050 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
/* Callers guarantee ARG0 is an INTEGER_CST or REAL_CST.  */
13054 gcc_unreachable ();
13060 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13061 an integer constant or real constant.
13063 TYPE is the type of the result. */
13066 fold_abs_const (tree arg0
, tree type
)
13068 tree t
= NULL_TREE
;
13070 switch (TREE_CODE (arg0
))
13073 /* If the value is unsigned, then the absolute value is
13074 the same as the ordinary value. */
13075 if (TYPE_UNSIGNED (type
))
13077 /* Similarly, if the value is non-negative. */
13078 else if (INT_CST_LT (integer_minus_one_node
, arg0
))
13080 /* If the value is negative, then the absolute value is
its negation: negate the double-word value, propagating any
overflow (e.g. abs of the most negative value) onto the result.  */
13084 unsigned HOST_WIDE_INT low
;
13085 HOST_WIDE_INT high
;
13086 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
13087 TREE_INT_CST_HIGH (arg0
),
13089 t
= force_fit_type_double (type
, low
, high
, -1,
13090 overflow
| TREE_OVERFLOW (arg0
));
/* REAL_CST: flip the sign only when the value is negative.  */
13095 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
13096 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
/* Callers guarantee ARG0 is an INTEGER_CST or REAL_CST.  */
13102 gcc_unreachable ();
13108 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13109 constant. TYPE is the type of the result. */
13112 fold_not_const (tree arg0
, tree type
)
13114 tree t
= NULL_TREE
;
13116 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
13118 t
= force_fit_type_double (type
, ~TREE_INT_CST_LOW (arg0
),
13119 ~TREE_INT_CST_HIGH (arg0
), 0,
13120 TREE_OVERFLOW (arg0
));
13125 /* Given CODE, a relational operator, the target type, TYPE and two
13126 constant operands OP0 and OP1, return the result of the
13127 relational operation. If the result is not a compile time
13128 constant, then return NULL_TREE. */
13131 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
13133 int result
, invert
;
13135 /* From here on, the only cases we handle are when the result is
13136 known to be a constant. */
13138 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
13140 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
13141 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
13143 /* Handle the cases where either operand is a NaN. */
13144 if (real_isnan (c0
) || real_isnan (c1
))
13154 case UNORDERED_EXPR
:
/* With -ftrapping-math an ordered comparison on a NaN may trap, so
   we must not fold it away here.  */
13168 if (flag_trapping_math
)
13174 gcc_unreachable ();
13177 return constant_boolean_node (result
, type
);
/* Neither operand is a NaN: compare the values directly.  */
13180 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
13183 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13185 To compute GT, swap the arguments and do LT.
13186 To compute GE, do LT and invert the result.
13187 To compute LE, swap the arguments, do LT and invert the result.
13188 To compute NE, do EQ and invert the result.
13190 Therefore, the code below must handle only EQ and LT. */
13192 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13197 code
= swap_tree_comparison (code
);
13200 /* Note that it is safe to invert for real values here because we
13201 have already handled the one case that it matters. */
13204 if (code
== NE_EXPR
|| code
== GE_EXPR
)
13207 code
= invert_tree_comparison (code
, false);
13210 /* Compute a result for LT or EQ if args permit;
13211 Otherwise return T. */
13212 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
13214 if (code
== EQ_EXPR
)
13215 result
= tree_int_cst_equal (op0
, op1
);
/* LT: pick the signed or unsigned double-word comparison based on
   the signedness of the operand type.  */
13216 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
13217 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
13219 result
= INT_CST_LT (op0
, op1
);
13226 return constant_boolean_node (result
, type
);
13229 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13230 Don't build a cleanup point expression for EXPR which doesn't have side
effects.  */
13234 fold_build_cleanup_point_expr (tree type
, tree expr
)
13236 /* If the expression does not have side effects then we don't have to wrap
13237 it with a cleanup point expression. */
13238 if (!TREE_SIDE_EFFECTS (expr
))
13241 /* If the expression is a return, check to see if the expression inside the
13242 return has no side effects or the right hand side of the modify expression
13243 inside the return. If either don't have side effects set we don't need to
13244 wrap the expression in a cleanup point expression. Note we don't check the
13245 left hand side of the modify because it should always be a return decl. */
13246 if (TREE_CODE (expr
) == RETURN_EXPR
)
13248 tree op
= TREE_OPERAND (expr
, 0);
13249 if (!op
|| !TREE_SIDE_EFFECTS (op
))
/* OP is the assignment inside the return; test its RHS.  */
13251 op
= TREE_OPERAND (op
, 1);
13252 if (!TREE_SIDE_EFFECTS (op
))
/* EXPR has side effects that need a cleanup point: wrap it.  */
13256 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
13259 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13260 avoid confusing the gimplify process. */
13263 build_fold_addr_expr_with_type (tree t
, tree ptrtype
)
13265 /* The size of the object is not relevant when talking about its address. */
13266 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
13267 t
= TREE_OPERAND (t
, 0);
13269 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13270 if (TREE_CODE (t
) == INDIRECT_REF
13271 || TREE_CODE (t
) == MISALIGNED_INDIRECT_REF
)
/* &*p folds to p, converted to PTRTYPE if the types differ.  */
13273 t
= TREE_OPERAND (t
, 0);
13274 if (TREE_TYPE (t
) != ptrtype
)
13275 t
= build1 (NOP_EXPR
, ptrtype
, t
);
/* Otherwise build a real ADDR_EXPR: walk down component references
   to the underlying base object and mark it addressable so later
   passes do not put it in a register.  */
13281 while (handled_component_p (base
))
13282 base
= TREE_OPERAND (base
, 0);
13284 TREE_ADDRESSABLE (base
) = 1;
13286 t
= build1 (ADDR_EXPR
, ptrtype
, t
);
13293 build_fold_addr_expr (tree t
)
13295 return build_fold_addr_expr_with_type (t
, build_pointer_type (TREE_TYPE (t
)));
13298 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13299 of an indirection through OP0, or NULL_TREE if no simplification is
possible.  */
13303 fold_indirect_ref_1 (tree type
, tree op0
)
13309 subtype
= TREE_TYPE (sub
);
13310 if (!POINTER_TYPE_P (subtype
))
/* Case 1: the pointer is a direct &object.  */
13313 if (TREE_CODE (sub
) == ADDR_EXPR
)
13315 tree op
= TREE_OPERAND (sub
, 0);
13316 tree optype
= TREE_TYPE (op
);
13317 /* *&CONST_DECL -> to the value of the const decl. */
13318 if (TREE_CODE (op
) == CONST_DECL
)
13319 return DECL_INITIAL (op
);
13320 /* *&p => p; make sure to handle *&"str"[cst] here. */
13321 if (type
== optype
)
13323 tree fop
= fold_read_from_constant_string (op
);
13329 /* *(foo *)&fooarray => fooarray[0] */
13330 else if (TREE_CODE (optype
) == ARRAY_TYPE
13331 && type
== TREE_TYPE (optype
))
13333 tree type_domain
= TYPE_DOMAIN (optype
);
13334 tree min_val
= size_zero_node
;
13335 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13336 min_val
= TYPE_MIN_VALUE (type_domain
);
13337 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
13339 /* *(foo *)&complexfoo => __real__ complexfoo */
13340 else if (TREE_CODE (optype
) == COMPLEX_TYPE
13341 && type
== TREE_TYPE (optype
))
13342 return fold_build1 (REALPART_EXPR
, type
, op
);
13343 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13344 else if (TREE_CODE (optype
) == VECTOR_TYPE
13345 && type
== TREE_TYPE (optype
))
13347 tree part_width
= TYPE_SIZE (type
);
13348 tree index
= bitsize_int (0);
13349 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
/* Case 2: the pointer is &object plus a constant offset.  */
13353 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13354 if (TREE_CODE (sub
) == PLUS_EXPR
13355 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
13357 tree op00
= TREE_OPERAND (sub
, 0);
13358 tree op01
= TREE_OPERAND (sub
, 1);
13362 op00type
= TREE_TYPE (op00
);
13363 if (TREE_CODE (op00
) == ADDR_EXPR
13364 && TREE_CODE (TREE_TYPE (op00type
)) == COMPLEX_TYPE
13365 && type
== TREE_TYPE (TREE_TYPE (op00type
)))
/* The offset must equal the size of one part of the complex.  */
13367 tree size
= TYPE_SIZE_UNIT (type
);
13368 if (tree_int_cst_equal (size
, op01
))
13369 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (op00
, 0));
/* Case 3: indirecting a pointer-to-array at the element type.  */
13373 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13374 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
13375 && type
== TREE_TYPE (TREE_TYPE (subtype
)))
13378 tree min_val
= size_zero_node
;
13379 sub
= build_fold_indirect_ref (sub
);
13380 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
13381 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
13382 min_val
= TYPE_MIN_VALUE (type_domain
);
13383 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
13389 /* Builds an expression for an indirection through T, simplifying some
cases when possible.  */
13393 build_fold_indirect_ref (tree t
)
13395 tree type
= TREE_TYPE (TREE_TYPE (t
));
13396 tree sub
= fold_indirect_ref_1 (type
, t
);
/* No simplification applied: emit a plain INDIRECT_REF.  */
13401 return build1 (INDIRECT_REF
, type
, t
);
13404 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13407 fold_indirect_ref (tree t
)
/* Try to simplify the dereference of T's operand at T's type.  */
13409 tree sub
= fold_indirect_ref_1 (TREE_TYPE (t
), TREE_OPERAND (t
, 0));
13417 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13418 whose result is ignored. The type of the returned tree need not be
13419 the same as the original expression. */
13422 fold_ignored_result (tree t
)
/* An expression with no side effects at all can be dropped entirely.  */
13424 if (!TREE_SIDE_EFFECTS (t
))
13425 return integer_zero_node
;
13428 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
/* Unary node: the side effects live in the operand.  */
13431 t
= TREE_OPERAND (t
, 0);
/* Binary/comparison node: keep whichever operand carries the side
   effects; if both do, stop stripping.  */
13435 case tcc_comparison
:
13436 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
13437 t
= TREE_OPERAND (t
, 0);
13438 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
13439 t
= TREE_OPERAND (t
, 1);
13444 case tcc_expression
:
13445 switch (TREE_CODE (t
))
13447 case COMPOUND_EXPR
:
13448 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
13450 t
= TREE_OPERAND (t
, 0);
/* COND_EXPR: only strippable when neither arm has side effects.  */
13454 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
13455 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
13457 t
= TREE_OPERAND (t
, 0);
13470 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13471 This can only be applied to objects of a sizetype. */
13474 round_up (tree value
, int divisor
)
13476 tree div
= NULL_TREE
;
13478 gcc_assert (divisor
> 0);
13482 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13483 have to do anything. Only do this when we are not given a const,
13484 because in that case, this check is more expensive than just
doing the rounding.  */
13486 if (TREE_CODE (value
) != INTEGER_CST
)
13488 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13490 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
13494 /* If divisor is a power of two, simplify this to bit manipulation. */
13495 if (divisor
== (divisor
& -divisor
))
/* (value + divisor-1) & -divisor rounds up for power-of-two
   divisors.  */
13499 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
13500 value
= size_binop (PLUS_EXPR
, value
, t
);
13501 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
13502 value
= size_binop (BIT_AND_EXPR
, value
, t
);
/* General case: ceil-divide then multiply back.  */
13507 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13508 value
= size_binop (CEIL_DIV_EXPR
, value
, div
);
13509 value
= size_binop (MULT_EXPR
, value
, div
);
13515 /* Likewise, but round down. */
13518 round_down (tree value
, int divisor
)
13520 tree div
= NULL_TREE
;
13522 gcc_assert (divisor
> 0);
13526 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13527 have to do anything. Only do this when we are not given a const,
13528 because in that case, this check is more expensive than just
doing the rounding.  */
13530 if (TREE_CODE (value
) != INTEGER_CST
)
13532 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13534 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
13538 /* If divisor is a power of two, simplify this to bit manipulation. */
13539 if (divisor
== (divisor
& -divisor
))
/* value & -divisor clears the low bits, rounding down for
   power-of-two divisors.  */
13543 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
13544 value
= size_binop (BIT_AND_EXPR
, value
, t
);
/* General case: floor-divide then multiply back.  */
13549 div
= build_int_cst (TREE_TYPE (value
), divisor
);
13550 value
= size_binop (FLOOR_DIV_EXPR
, value
, div
);
13551 value
= size_binop (MULT_EXPR
, value
, div
);
13557 /* Returns the pointer to the base of the object addressed by EXP and
13558 extracts the information about the offset of the access, storing it
13559 to PBITPOS and POFFSET. */
13562 split_address_to_core_and_offset (tree exp
,
13563 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
13566 enum machine_mode mode
;
13567 int unsignedp
, volatilep
;
13568 HOST_WIDE_INT bitsize
;
/* For &obj, peel apart the reference into base object + bit offset
   (PBITPOS) + variable offset (POFFSET), and return &base.  */
13570 if (TREE_CODE (exp
) == ADDR_EXPR
)
13572 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
13573 poffset
, &mode
, &unsignedp
, &volatilep
,
13575 core
= build_fold_addr_expr (core
);
/* Anything else is its own core with zero offset.  */
13581 *poffset
= NULL_TREE
;
13587 /* Returns true if addresses of E1 and E2 differ by a constant, false
13588 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13591 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
13594 HOST_WIDE_INT bitpos1
, bitpos2
;
13595 tree toffset1
, toffset2
, tdiff
, type
;
/* Split each address into base object + byte-aligned bit offset +
   symbolic offset.  */
13597 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
13598 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
/* Bail out for sub-byte positions or distinct base objects.  */
13600 if (bitpos1
% BITS_PER_UNIT
!= 0
13601 || bitpos2
% BITS_PER_UNIT
!= 0
13602 || !operand_equal_p (core1
, core2
, 0))
/* Both symbolic offsets present: their difference must fold to a
   host-word-sized constant.  */
13605 if (toffset1
&& toffset2
)
13607 type
= TREE_TYPE (toffset1
);
13608 if (type
!= TREE_TYPE (toffset2
))
13609 toffset2
= fold_convert (type
, toffset2
);
13611 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
13612 if (!cst_and_fits_in_hwi (tdiff
))
13615 *diff
= int_cst_value (tdiff
);
13617 else if (toffset1
|| toffset2
)
13619 /* If only one of the offsets is non-constant, the difference cannot
be a constant.  */
/* Fold the byte-converted bit-position delta into the result.  */
13626 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
13630 /* Simplify the floating point expression EXP when the sign of the
13631 result is not significant. Return NULL_TREE if no simplification
13635 fold_strip_sign_ops (tree exp
)
13639 switch (TREE_CODE (exp
))
13643 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
13644 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
13648 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
13650 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
13651 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
13652 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
13653 return fold_build2 (TREE_CODE (exp
), TREE_TYPE (exp
),
13654 arg0
? arg0
: TREE_OPERAND (exp
, 0),
13655 arg1
? arg1
: TREE_OPERAND (exp
, 1));
13658 case COMPOUND_EXPR
:
13659 arg0
= TREE_OPERAND (exp
, 0);
13660 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
13662 return fold_build2 (COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
13666 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
13667 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
13669 return fold_build3 (COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
13670 arg0
? arg0
: TREE_OPERAND (exp
, 1),
13671 arg1
? arg1
: TREE_OPERAND (exp
, 2));
13676 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
13679 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
13680 /* Strip copysign function call, return the 1st argument. */
13681 arg0
= TREE_VALUE (TREE_OPERAND (exp
, 1));
13682 arg1
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp
, 1)));
13683 return omit_one_operand (TREE_TYPE (exp
), arg0
, arg1
);
13686 /* Strip sign ops from the argument of "odd" math functions. */
13687 if (negate_mathfn_p (fcode
))
13689 arg0
= fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp
, 1)));
13691 return build_function_call_expr (get_callee_fndecl (exp
),
13692 build_tree_list (NULL_TREE
,