/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree, tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
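
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): OVERFLOW_SUM_SIGN flags the
   two signed-addition overflow cases.  Adding two large positive halves
   flips the sign bit of the sum, while adding values of opposite signs
   can never overflow.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  /* a + a wraps into the sign bit; compute it in unsigned arithmetic to
     avoid undefined signed overflow in the example itself.  */
  HOST_WIDE_INT sum
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + (unsigned HOST_WIDE_INT) a);

  gcc_assert (OVERFLOW_SUM_SIGN (a, a, sum));	  /* same signs, sign flipped */
  gcc_assert (!OVERFLOW_SUM_SIGN (a, -1, a - 1)); /* opposite signs: safe */
}
#endif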
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
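
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): encode and decode are
   inverses, so a double-word value survives a round trip through the
   4-halfword representation.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo = 0x12345678, lo2;
  HOST_WIDE_INT hi = -42, hi2;

  encode (words, lo, hi);	/* each words[i] holds half a word */
  decode (words, &lo2, &hi2);	/* reassemble: LOWPART + HIGHPART * BASE */
  gcc_assert (lo2 == lo && hi2 == hi);
}
#endif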
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
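
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): forcing a value into a
   narrow type truncates and sign extends it, and the nonzero return
   value reports that the value changed.  The `int8_type' parameter is a
   hypothetical 8-bit signed type, e.g. signed char.  */
#if 0
static void
fit_double_type_example (tree int8_type)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* 0x1FF does not fit in 8 signed bits: it truncates to 0xFF and then
     sign extends to -1, so fit_double_type reports an overflow.  */
  gcc_assert (fit_double_type (0x1FF, 0, &lv, &hv, int8_type));
  gcc_assert (lv == (unsigned HOST_WIDE_INT) -1 && hv == -1);
}
#endif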
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
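
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): a typical caller narrows a
   constant and lets OVERFLOWABLE decide which kinds of overflow taint
   the result with TREE_OVERFLOW.  `force_fit_example' is a hypothetical
   wrapper for illustration only.  */
#if 0
static tree
force_fit_example (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT high)
{
  /* overflowable = 1:  only signed overflow marks the node;
     overflowable = -1: any overflow marks it;
     overflowed = false: no prior overflow to propagate.  */
  return force_fit_type_double (type, low, high, 1, false);
}
#endif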
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
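
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): the carry out of the low
   word is exactly the (l < l1) test above, because unsigned addition
   wraps modulo 2^HOST_BITS_PER_WIDE_INT.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* low = ~0 + 1 wraps to 0 and carries 1 into the high word.  */
  int ovf = add_double_with_sign ((unsigned HOST_WIDE_INT) -1, 0, 1, 0,
				  &lv, &hv, false);
  gcc_assert (lv == 0 && hv == 1 && !ovf);
}
#endif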
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
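
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): a rotate is the OR of two
   opposite shifts, so rotating left by N and then right by N restores
   the original PREC-bit value.  */
#if 0
static void
rotate_example (void)
{
  unsigned HOST_WIDE_INT l1, l2;
  HOST_WIDE_INT h1, h2;

  lrotate_double (0x1234, 0, 5, 2 * HOST_BITS_PER_WIDE_INT, &l1, &h1);
  rrotate_double (l1, h1, 5, 2 * HOST_BITS_PER_WIDE_INT, &l2, &h2);
  gcc_assert (l2 == 0x1234 && h2 == 0);
}
#endif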
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
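
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): the rounding codes only
   differ when the remainder is nonzero.  For -7 / 2, TRUNC gives -3
   with remainder -1, while FLOOR gives -4 with remainder 1.  */
#if 0
static void
div_round_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  /* -7 as a double-word value is low = -7, high = -1.  */
  div_and_round_double (TRUNC_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  /* quotient == -3, remainder == -1 */
  div_and_round_double (FLOOR_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  /* quotient == -4, remainder == 1 */
}
#endif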
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
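
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): the expected calling
   pattern brackets a speculative fold and issues the deferred warning
   only if the folded result is actually used.  `fold_speculatively' is
   a hypothetical helper for illustration only.  */
#if 0
static tree
fold_speculatively (tree expr, tree stmt)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  if (folded != NULL_TREE && TREE_CONSTANT (folded))
    /* Result is used: emit any deferred -Wstrict-overflow warning.  */
    fold_undefer_overflow_warnings (true, stmt, 0);
  else
    fold_undefer_and_ignore_overflow_warnings ();
  return folded;
}
#endif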
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
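
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): the one value this rejects
   is the most negative value of the type, whose negation is not
   representable in two's complement.  `int_type' is a hypothetical
   signed integer type.  */
#if 0
static void
may_negate_example (tree int_type)
{
  gcc_assert (may_negate_without_overflow_p (build_int_cst (int_type, -42)));
  gcc_assert (!may_negate_without_overflow_p (TYPE_MIN_VALUE (int_type)));
}
#endif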
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (tem == NULL_TREE)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
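
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): splitting `x - 3' for
   PLUS_EXPR yields the variable x with the literal 3 routed to
   *MINUS_LITP, since a subtracted literal must be negated to rebuild
   the tree with PLUS_EXPR.  `x_minus_3' is a hypothetical MINUS_EXPR
   tree used for illustration only.  */
#if 0
static void
split_tree_example (tree x_minus_3)
{
  tree con, lit, minus_lit, var;

  var = split_tree (x_minus_3, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* var == x, con == NULL, lit == NULL, minus_lit == 3.  */
}
#endif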
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
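
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): subtracting two sizetype
   constants yields the signed ssizetype, with the subtraction ordered
   so that neither direction overflows.  */
#if 0
static void
size_diffop_example (void)
{
  tree d = size_diffop (size_int (2), size_int (5));
  /* d is the ssizetype constant -3, not a huge unsigned value.  */
  gcc_assert (tree_int_cst_equal (d, ssize_int (-3)));
}
#endif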
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting a pointer  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
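
/* Illustrative sketch (editor's addition, not from the original source;
   kept under #if 0 so it is never compiled): out-of-range reals
   saturate to the type's extreme values and the result is marked with
   TREE_OVERFLOW, per the Java-style rules above.  Both parameters are
   hypothetical: an 8-bit signed type and a REAL_CST such as 1e10.  */
#if 0
static void
fp_to_int_example (tree int8_type, tree big_real)
{
  tree t = fold_convert_const_int_from_real (FIX_TRUNC_EXPR, int8_type,
					     big_real);
  /* t has the value of TYPE_MAX_VALUE (int8_type) and TREE_OVERFLOW set.  */
  gcc_assert (TREE_OVERFLOW (t));
}
#endif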
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
        return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}
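
/* Illustrative sketch (not part of GCC): the COMPLEX_TYPE case above
   converts a complex value by converting the real and imaginary parts
   separately and recombining them, the same shape as this GNU C
   helper (__real__ and __imag__ are GCC extensions).  */
static _Complex double
example_complex_widen (_Complex float z)
{
  _Complex double res;
  __real__ res = (double) __real__ z;   /* convert each part ... */
  __imag__ res = (double) __imag__ z;   /* ... then recombine.  */
  return res;
}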
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR: case ORDERED_EXPR: case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR: return LT_EXPR;
    case GE_EXPR: return LE_EXPR;
    case LT_EXPR: return GT_EXPR;
    case LE_EXPR: return GE_EXPR;
    case UNGT_EXPR: return UNLT_EXPR;
    case UNGE_EXPR: return UNLE_EXPR;
    case UNLT_EXPR: return UNGT_EXPR;
    case UNLE_EXPR: return UNGE_EXPR;
    default: gcc_unreachable ();
    }
}
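
/* Illustrative sketch (not part of GCC): why HONOR_NANS matters in
   invert_tree_comparison above.  With a NaN operand, both of these
   return 0, so "x >= y" is not the inverse of "x < y"; the correct
   inverse is the unordered-or-greater-equal form (UNGE_EXPR).  */
static int example_lt (double x, double y) { return x < y; }
static int example_ge (double x, double y) { return x >= y; }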
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR: return COMPCODE_LT;
    case EQ_EXPR: return COMPCODE_EQ;
    case LE_EXPR: return COMPCODE_LE;
    case GT_EXPR: return COMPCODE_GT;
    case NE_EXPR: return COMPCODE_NE;
    case GE_EXPR: return COMPCODE_GE;
    case ORDERED_EXPR: return COMPCODE_ORD;
    case UNORDERED_EXPR: return COMPCODE_UNORD;
    case UNLT_EXPR: return COMPCODE_UNLT;
    case UNEQ_EXPR: return COMPCODE_UNEQ;
    case UNLE_EXPR: return COMPCODE_UNLE;
    case UNGT_EXPR: return COMPCODE_UNGT;
    case LTGT_EXPR: return COMPCODE_LTGT;
    case UNGE_EXPR: return COMPCODE_UNGE;
    default: gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT: return LT_EXPR;
    case COMPCODE_EQ: return EQ_EXPR;
    case COMPCODE_LE: return LE_EXPR;
    case COMPCODE_GT: return GT_EXPR;
    case COMPCODE_NE: return NE_EXPR;
    case COMPCODE_GE: return GE_EXPR;
    case COMPCODE_ORD: return ORDERED_EXPR;
    case COMPCODE_UNORD: return UNORDERED_EXPR;
    case COMPCODE_UNLT: return UNLT_EXPR;
    case COMPCODE_UNEQ: return UNEQ_EXPR;
    case COMPCODE_UNLE: return UNLE_EXPR;
    case COMPCODE_UNGT: return UNGT_EXPR;
    case COMPCODE_LTGT: return LTGT_EXPR;
    case COMPCODE_UNGE: return UNGE_EXPR;
    default: gcc_unreachable ();
    }
}
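
/* Illustrative sketch (not part of GCC): the point of the bit-based
   encoding.  With LT, EQ and GT each assigned one bit (as in the
   comparison_code enum near the top of this file), ANDing or ORing
   two comparisons of the same operands reduces to bitwise AND/OR of
   their masks: e.g. "a <= b && a >= b" is (LT|EQ) & (GT|EQ) == EQ.  */
static unsigned
example_combine_le_ge (void)
{
  unsigned lt = 1, eq = 2, gt = 4;
  unsigned le = lt | eq, ge = gt | eq;
  return le & ge;       /* == eq, i.e. "a == b" */
}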
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}
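
/* Illustrative sketch (not part of GCC): the effect of the folding
   above at source level.  "x < y || x == y" on the same operands
   combines (COMPCODE_LT | COMPCODE_EQ) into COMPCODE_LE, i.e. the
   single comparison "x <= y".  (The two forms also agree on NaNs:
   all three of <, == and <= are false when either operand is NaN;
   only the trapping behavior can differ, which is what the
   flag_trapping_math checks above guard.)  */
static int example_or_of_compares (double x, double y)
{ return x < y || x == y; }
static int example_combined (double x, double y)
{ return x <= y; }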
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */

#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            call_expr_arg_iterator iter0, iter1;
            tree a0, a1;
            for (a0 = first_call_expr_arg (arg0, &iter0),
                   a1 = first_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_call_expr_arg (&iter0),
                   a1 = next_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
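
/* Illustrative sketch (not part of GCC): the IEEE subtlety the comment
   before operand_equal_p calls out.  -0.0 == 0.0 is true under C's ==,
   yet the two values are distinguishable (1/-0.0 is -inf, 1/0.0 is
   +inf), which is why REAL_CSTs are compared for bit-identity above
   rather than with ==.  Assumes default IEEE division semantics.  */
static int
example_signed_zeros_distinct (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz && 1.0 / pz != 1.0 / nz;      /* returns 1 */
}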
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }

      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
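
/* Illustrative sketch (not part of GCC): what omit_one_operand builds
   at source level.  Folding "example_f () * 0" must not discard the
   call, so the result is the equivalent of a comma expression.  */
static int example_counter;
static int example_f (void) { return ++example_counter; }
static int
example_omitted_operand (void)
{
  return (example_f (), 0);     /* what "example_f () * 0" folds to */
}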
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return NULL_TREE;
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1 (TRUTH_NOT_EXPR, type, arg);

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
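
/* Illustrative sketch (not part of GCC): the TRUTH_AND_EXPR case above
   is De Morgan's law at source level.  */
static int example_demorgan_before (int a, int b) { return !(a && b); }
static int example_demorgan_after (int a, int b) { return !a || !b; }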
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}
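
/* Illustrative sketch (not part of GCC): the distribution above at
   source level; both functions compute the same value, the second
   with one fewer bit operation.  */
static unsigned
example_undistributed (unsigned a, unsigned b, unsigned c)
{ return (a | b) & (a | c); }
static unsigned
example_distributed (unsigned a, unsigned b, unsigned c)
{ return a | (b & c); }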
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2 (code, compare_type,
                        fold_build2 (BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (linner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask),
                        fold_build2 (BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (rinner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
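
/* Illustrative sketch (not part of GCC): the shape of the rewrite for
   the constant case.  Comparing a bit-field against a constant can be
   done by masking the containing word and comparing with the constant
   shifted into place, avoiding the extract-and-shift sequence.  The
   offset 3 and width mask 0x7 below are made up for illustration.  */
static int
example_bit_field_compare (unsigned word)
{
  /* For a 3-bit field at bit offset 3, "field == 2" becomes: */
  return (word & (0x7u << 3)) == (2u << 3);
}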
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
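
/* Illustrative sketch (not part of GCC): the shift idiom tested above.
   Shifting an all-ones value left and then right by PRECISION - SIZE
   leaves exactly SIZE low-order ones; for an unsigned 32-bit value
   that is simply a right shift (SIZE >= 1 assumed, to avoid a
   full-width shift).  */
static unsigned
example_low_mask (unsigned size)        /* 1 <= size <= 32 */
{
  return ~0u >> (32 - size);
}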
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
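
/* Illustrative sketch (not part of GCC): the range-test rewrite at
   source level.  Subtracting the low bound in unsigned arithmetic
   folds the whole chain into one compare, because values below 2 wrap
   around to huge unsigned numbers.  */
static int example_chain (int x)
{ return x == 2 || x == 3 || x == 4 || x == 5; }
static int example_range_test (int x)
{ return (unsigned) x - 2u <= 3u; }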
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  build_int_cst (arg0_type, 0),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = build_int_cst (arg0_type, 0);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               build_int_cst (exp_type, 0),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                build_int_cst (exp_type, 0),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        build_int_cst (exp_type, 1));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
             move a constant to the other side.  */
          if (!TYPE_UNSIGNED (arg0_type)
              && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
            *strict_overflow_p = true;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             build_int_cst (arg0_type, 1));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
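
/* Illustrative sketch (not part of GCC): the PLUS_EXPR bound
   adjustment and wrap-around normalization above, at source level for
   32-bit unsigned arithmetic.  "x + 3 <= 10" moves the constant to
   the bounds, giving the wrapped range [-3 mod 2**32, 7], which
   normalizes to the disjunction below.  */
static int example_before (unsigned x) { return x + 3u <= 10u; }
static int example_after (unsigned x)
{ return x <= 7u || x >= 0u - 3u; }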
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
                        fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
                        fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
                        fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
         if the latter is a subtype.  It must, however, be within the base
         type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
        etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                              TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold_build2 (MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
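/* For example, merging the two "in" ranges + [0, 9] and + [5, 20] (the
   conjunction of both membership tests) yields the single range + [5, 9],
   and merging the "in" range + [0, 4] with the "out" range - [3, 20]
   yields + [0, 2].  */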
static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    return 0;
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    return 0;
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    return 0;
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */
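/* For example, "x >= 0 ? x : -x" folds to ABS_EXPR <x>, and
   "a < b ? a : b" folds to MIN_EXPR <a, b>, subject to the signed-zero
   and NaN caveats spelled out below.  */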
static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
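/* For example, "ch >= '0' && ch <= '9'" has both operands converted into
   ranges by make_range, which merge_ranges combines into the single range
   + ['0', '9']; build_range_check then turns that into one unsigned
   comparison against '9' - '0'.  */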
static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
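/* Worked example, with P = 4 in a 32-bit mode and no MASK: the already
   sign-extended constant 0xfffffff8 (-8 in 4 bits) comes back as
   0x00000008 with all extra bits clear, whereas the zero-extended
   constant 0x00000008 comes back as 0xfffffff8 with all extra bits set.  */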
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
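/* As a concrete illustration: if A and B are adjacent 8-bit fields at bit
   offsets 0 and 8 of the same word, "p->a == 2 && p->b == 4" can become a
   single 16-bit load compared against the constant (4 << 8) | 2 on a
   little-endian target (the bit positions are mirrored for big-endian,
   as done below for BYTES_BIG_ENDIAN).  */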
static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
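/* As a further example: when signed overflow is undefined for the type,
   (X * 6) / 2 can be rewritten as X * 3 by the cancellation rules below,
   and *STRICT_OVERFLOW_P is set, since the result would differ if X * 6
   wrapped.  */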
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */
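/* For instance, for "&a[i]" this returns true with BASE set to the array
   A and OFFSET set to I scaled by array_ref_element_size; for a plain
   pointer variable P, BASE is P and OFFSET is NULL_TREE.  */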
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */
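/* For example, with ARG the constant 5, "5 + (x < y)" becomes
   "(x < y) ? 6 : 5" once each arm is folded.  Note that the
   transformation is attempted only when ARG is constant, so no
   SAVE_EXPR is ever needed.  */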
static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
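/* Worked example (editorial illustration, not from the original
   sources): for a double X under round-to-nearest, `X - 0.0' folds to
   X, since even `(-0.0) - 0.0' is -0.0; but `X + 0.0' must not fold
   when signed zeros are honored, since `(-0.0) + 0.0' is +0.0.  When
   rounding towards -infinity must be honored, `0.0 - 0.0' is -0.0,
   so the X - 0.0 fold is disabled too, matching the final test.  */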
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
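/* Worked example (editorial illustration, not from the original
   sources): for a double x, `x < __builtin_inf ()' folds to
   `x <= DBL_MAX', and `x >= __builtin_inf ()' folds to
   `x > DBL_MAX', with the bound obtained from real_maxval above.  */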
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
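/* Worked example (editorial illustration, not from the original
   sources): for signed x, `x / 4 == 2' holds exactly for x in
   [8, 11] (prod == 8, tmp == 3, hi == 11), so it folds to a range
   check equivalent to `8 <= x && x <= 11'; likewise `x / 4 > 2'
   folds to `x > 11'.  */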
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
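/* Worked example (editorial illustration, not from the original
   sources): for a 32-bit int x, `(x & 0x80000000) != 0' tests exactly
   the sign bit, so it folds to `x < 0'; the EQ_EXPR form folds to
   `x >= 0'.  */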
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1), where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
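/* Worked example (editorial illustration, not from the original
   sources): `(x & 8) != 0' has bitnum == 3 and becomes
   `((unsigned) x >> 3) & 1' converted to the result type; the
   EQ_EXPR variant XORs in a 1 before the final AND, yielding the
   complemented bit.  */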
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
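/* Worked example (editorial illustration, not from the original
   sources): if s has type short, `(int) s == 70000' compares against
   a constant outside short's range and folds to constant 0 (the
   NE_EXPR form to 1), while `(int) s == 100' folds to the narrower
   comparison `s == (short) 100'.  */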
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
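/* Worked example (editorial illustration, not from the original
   sources): for an int i, `(unsigned int) i == 5U' only changes the
   signedness of the operand, so it folds back to `i == 5' with the
   constant refitted into the inner signed type.  */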
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat OP1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (code, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
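/* Worked example (editorial illustration, not from the original
   sources): with `int a[100]' and element size 4, the byte-offset
   form `&a[i] + 4 * delta' folds to `&a[i + delta]'; for a
   multi-dimensional array the fold is only done when the new index
   provably stays within the inner dimension.  */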
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
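/* Worked example (editorial illustration, not from the original
   sources): given BOUND `a < x' and INEQ `a + 1 > y', the difference
   `(a + 1) - a' folds to 1, so the routine returns `a >= y', which
   together with the bound is equivalent to the original
   conjunction.  */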
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
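/* Worked examples (editorial illustration, not from the original
   sources): `a * c + b * c' folds to `(a + b) * c'; with no common
   multiplicand but constant factors, `x * 12 - y * 4' shares the
   power-of-two factor 4 and folds to `(x * 3 - y) * 4'.  */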
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
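/* Worked example (editorial illustration, not from the original
   sources): on a 32-bit little-endian target, native_encode_int of
   the INTEGER_CST 0x01020304 stores the bytes 04 03 02 01 into PTR
   and returns 4.  */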
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (FLOAT_WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

static int
native_encode_expr (tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (FLOAT_WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
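/* Worked example (editorial illustration, not from the original
   sources): VIEW_CONVERT_EXPR<int>(1.0f) is folded at compile time
   by encoding the REAL_CST into its 4 target bytes and re-reading
   them as an INTEGER_CST, giving 0x3f800000 on an IEEE
   single-precision target.  */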
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move it
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }
  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ, or
             - the final type is a pointer type and the initial type not, or
             - the initial type is a pointer to an array and the final type
               not.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && final_ptr == inside_ptr
              && ! (inside_ptr
                    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
                    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, build_fold_addr_expr (base));
        }

      if ((TREE_CODE (op0) == MODIFY_EXPR
           || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
          && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
        return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
          return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_REALPART (arg0));
          tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
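/* Worked examples (editorial illustration, not from the original
   sources): in fold_unary, `~ (x - 1)' folds to `-x' by the
   BIT_NOT_EXPR rules, and for a char c the double conversion
   `(char) (int) c' collapses under the two-conversions-in-a-row
   rules because the wider intermediate type preserves the value.  */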
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
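/* Worked example (editorial illustration, not from the original
   sources): `MIN (MAX (a, b), b)' always yields b, so it folds to b,
   with a preserved only for its side effects via omit_one_operand;
   the other three patterns are the symmetric variants.  */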
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
          || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2 (code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* In principle pointers also have undefined overflow behavior,
     but that causes problems elsewhere.  */
  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      || POINTER_TYPE_P (TREE_TYPE (arg0)))
    return NULL_TREE;

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
                         TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand (type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand (type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning (("assuming signed overflow does not occur "
                                  "when changing X +- C1 cmp C2 to "
                                  "X cmp C1 +- C2"),
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type, variable, lhs);
        }
    }
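  /* Illustrative examples (hypothetical operands): "x + 10 < 20"
     becomes "x < 10".  When the re-associated constant overflows,
     e.g. "x + 1 < INT_MIN", the result is known outright and the
     comparison folds to a constant, keeping x only for its side
     effects.  */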
  /* For comparisons of pointers we can decompose them to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR to do more
     than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          /* We have to make sure to have an indirect/non-indirect base1
             just the same as we did for base0.  */
          if (TREE_CODE (base1) == INDIRECT_REF
              && !indirect_base0)
            base1 = TREE_OPERAND (base1, 0);
          else if (!indirect_base0)
            base1 = NULL_TREE;
        }
      else if (indirect_base0)
        base1 = NULL_TREE;

      /* If we have equivalent bases we might be able to simplify.  */
      if (base0 && base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if (offset0 == offset1
              || (offset0 && offset1
                  && operand_equal_p (offset0, offset1, 0)))
            {
              switch (code)
                {
                case EQ_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
                case NE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
                case LT_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
                case LE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
                case GE_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
                case GT_EXPR:
                  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1)
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);

              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (signed_size_type_node, 0);
              else
                offset0 = fold_convert (signed_size_type_node, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (signed_size_type_node, 0);
              else
                offset1 = fold_convert (signed_size_type_node, offset1);

              return fold_build2 (code, type, offset0, offset1);
            }
        }
    }
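  /* Illustrative example (hypothetical declaration): given
       struct S { int a; int b; } s;
     "&s.a == &s.b" has equal bases and differing constant bit
     positions, so it folds to 0 at compile time, and "&s.a < &s.b"
     folds to 1 by the same bit position comparison.  */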
  /* If this is a comparison of two exprs that look like an ARRAY_REF of the
     same object, then we can fold this to a comparison of the two offsets in
     signed size type.  This is possible because pointer arithmetic is
     restricted to remain within an object and overflow on pointer differences
     is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.

     We check flag_wrapv directly because pointers types are unsigned,
     and therefore TYPE_OVERFLOW_WRAPS returns true for them.  That is
     normally what we want to avoid certain odd overflow cases, but
     not here.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && !flag_wrapv
      && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
    {
      tree base0, offset0, base1, offset1;

      if (extract_array_ref (arg0, &base0, &offset0)
          && extract_array_ref (arg1, &base1, &offset1)
          && operand_equal_p (base0, base1, 0))
        {
          tree signed_size_type_node;
          signed_size_type_node = signed_type_for (size_type_node);

          /* By converting to signed size type we cover middle-end pointer
             arithmetic which operates on unsigned pointer types of size
             type size and ARRAY_REF offsets which are properly sign or
             zero extended from their type in case it is narrower than
             size type.  */
          if (offset0 == NULL_TREE)
            offset0 = build_int_cst (signed_size_type_node, 0);
          else
            offset0 = fold_convert (signed_size_type_node, offset0);
          if (offset1 == NULL_TREE)
            offset1 = build_int_cst (signed_size_type_node, 0);
          else
            offset1 = fold_convert (signed_size_type_node, offset1);

          return fold_build2 (code, type, offset0, offset1);
        }
    }
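  /* Illustrative example (hypothetical operands): for "int a[100];"
     the test "&a[i] < &a[j]" has a common base, so it folds to a
     comparison of the two offsets in the signed variant of size type,
     effectively comparing i and j.  */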
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type,
                              variable1,
                              fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
                                           variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2 (code, type,
                              fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
                                           variable1, cst),
                              variable2);
        }
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2 (code, type, fold_convert (newtype, targ0),
                            fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
                            TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 0),
                                build_real (TREE_TYPE (arg1),
                                            REAL_VALUE_NEGATE (cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2 (code, type, arg0,
                                build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand (type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst))
            {
              tem = fold_inf_compare (code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (swap_tree_comparison (code), type,
                            TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
8639 if (TREE_CONSTANT (arg1
)
8640 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
8641 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
8642 /* This optimization is invalid for ordered comparisons
8643 if CONST+INCR overflows or if foo+incr might overflow.
8644 This optimization is invalid for floating point due to rounding.
8645 For pointer types we assume overflow doesn't happen. */
8646 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
8647 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8648 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
8650 tree varop
, newconst
;
8652 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
8654 newconst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
8655 arg1
, TREE_OPERAND (arg0
, 1));
8656 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
8657 TREE_OPERAND (arg0
, 0),
8658 TREE_OPERAND (arg0
, 1));
8662 newconst
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
8663 arg1
, TREE_OPERAND (arg0
, 1));
8664 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
8665 TREE_OPERAND (arg0
, 0),
8666 TREE_OPERAND (arg0
, 1));
8670 /* If VAROP is a reference to a bitfield, we must mask
8671 the constant by the width of the field. */
8672 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
8673 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
8674 && host_integerp (DECL_SIZE (TREE_OPERAND
8675 (TREE_OPERAND (varop
, 0), 1)), 1))
8677 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
8678 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
8679 tree folded_compare
, shift
;
8681 /* First check whether the comparison would come out
8682 always the same. If we don't do that we would
8683 change the meaning with the masking. */
8684 folded_compare
= fold_build2 (code
, type
,
8685 TREE_OPERAND (varop
, 0), arg1
);
8686 if (TREE_CODE (folded_compare
) == INTEGER_CST
)
8687 return omit_one_operand (type
, folded_compare
, varop
);
8689 shift
= build_int_cst (NULL_TREE
,
8690 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
8691 shift
= fold_convert (TREE_TYPE (varop
), shift
);
8692 newconst
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
8694 newconst
= fold_build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
8698 return fold_build2 (code
, type
, varop
, newconst
);
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
          || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2 (EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand (type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand (type, integer_one_node, arg0);
                }

              if (save_p)
                return save_expr (build2 (code, type, cval1, cval2));
              return fold_build2 (code, type, cval1, cval2);
            }
        }
    }
  /* If this is a comparison of complex values and both sides
     are COMPLEX_CST, do the comparison by parts to fold the
     comparison.  */
  if ((code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
      && TREE_CODE (arg0) == COMPLEX_CST
      && TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree real0, imag0, real1, imag1;
      enum tree_code outercode;

      real0 = TREE_REALPART (arg0);
      imag0 = TREE_IMAGPART (arg0);
      real1 = TREE_REALPART (arg1);
      imag1 = TREE_IMAGPART (arg1);
      outercode = code == EQ_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      return fold_build2 (outercode, type,
                          fold_build2 (code, type, real0, real1),
                          fold_build2 (code, type, imag0, imag1));
    }
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
        {
          tree op0 = TREE_OPERAND (cref0, 0);
          tree op1 = TREE_OPERAND (cref1, 0);
          return fold_build2 (code, type,
                              build_fold_addr_expr (op0),
                              build_fold_addr_expr (op1));
        }
    }
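  /* Illustrative example (hypothetical field name): when p and q have
     the same struct type, "&p->next == &q->next" names the same
     FIELD_DECL on both sides, so it folds to "p == q".  */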
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
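  /* Illustrative example (hypothetical operands): with truncating
     division, "x / 10 == 3" holds exactly for 30 <= x <= 39, so
     fold_div_compare can turn it into that range test.  */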
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    return fold_build2 (code, type,
                        TREE_OPERAND (arg1, 0),
                        TREE_OPERAND (arg0, 0));

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    return fold_build2 (swap_tree_comparison (code), type,
                        TREE_OPERAND (arg0, 0),
                        fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
                     fold_build2 (MULT_EXPR, itype, rpart, rpart),
                     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
                      fold_convert (itype, integer_zero_node));
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
               || IS_GIMPLE_STMT_CODE_CLASS (kind))
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        tem = const_binop (code, arg0, arg1, 0);
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
) == tcc_binary
9089 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9091 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9092 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9093 fold_build2 (code
, type
,
9094 TREE_OPERAND (arg0
, 1), op1
));
9095 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9096 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9097 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9098 fold_build2 (code
, type
,
9099 op0
, TREE_OPERAND (arg1
, 1)));
9101 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
9103 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
9105 /*cond_first_p=*/1);
9106 if (tem
!= NULL_TREE
)
9110 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
9112 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
9114 /*cond_first_p=*/0);
9115 if (tem
!= NULL_TREE
)
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              t1 = build_int_cst_type (type, -1);
              return omit_one_operand (type, t1, arg1);
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              t1 = build_int_cst_type (type, -1);
              return omit_one_operand (type, t1, arg0);
            }

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
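          /* Illustrative example (hypothetical masks): in
             "(x & 0xf0) + (y & 0x0f)" the masks share no bits, so no
             carry can propagate between the two halves and the sum is
             rewritten as "(x & 0xf0) | (y & 0x0f)".  */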
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }
          /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
             of the array.  The loop optimizer sometimes produces this
             kind of expression.  */
          if (TREE_CODE (arg0) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
              if (tem)
                return fold_convert (type, tem);
            }
          else if (TREE_CODE (arg1) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
              if (tem)
                return fold_convert (type, tem);
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                  : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

      return NULL_TREE;
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1 (BIT_NOT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, op1);

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 0)),
                                    arg0);
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 1)),
                                    arg0);
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1r ? arg1r
                                         : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                              : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                              : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1 (NEGATE_EXPR, rtype,
                                         arg1i ? arg1i
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
         of the array.  The loop optimizer sometimes produces this
         kind of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
          if (tem)
            return fold_convert (type, tem);
        }

      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (arg0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2 (MULT_EXPR, type,
                                negate_expr (arg0), tem);
9725 if (TREE_CODE (arg1
) == LSHIFT_EXPR
9726 && integer_onep (TREE_OPERAND (arg1
, 0)))
9727 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
9728 TREE_OPERAND (arg1
, 1));
9729 if (TREE_CODE (arg0
) == LSHIFT_EXPR
9730 && integer_onep (TREE_OPERAND (arg0
, 0)))
9731 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
9732 TREE_OPERAND (arg0
, 1));
9734 strict_overflow_p
= false;
9735 if (TREE_CODE (arg1
) == INTEGER_CST
9736 && 0 != (tem
= extract_muldiv (op0
,
9737 fold_convert (type
, arg1
),
9739 &strict_overflow_p
)))
9741 if (strict_overflow_p
)
9742 fold_overflow_warning (("assuming signed overflow does not "
9743 "occur when simplifying "
9745 WARN_STRICT_OVERFLOW_MISC
);
9746 return fold_convert (type
, tem
);
          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2 (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert (type, tem);
                  return fold_build2 (MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    negate_expr (fold_build1 (IMAGPART_EXPR,
                                                              rtype, arg0)),
                                    fold_build1 (REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return fold_build2 (COMPLEX_EXPR, type,
                                    fold_build1 (IMAGPART_EXPR, rtype, arg0),
                                    negate_expr (fold_build1 (REALPART_EXPR,
                                                              rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
9835 if (flag_unsafe_math_optimizations
)
9837 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
9838 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
9840 /* Optimizations of root(...)*root(...). */
9841 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
9844 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
9845 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
9847 /* Optimize sqrt(x)*sqrt(x) as x. */
9848 if (BUILTIN_SQRT_P (fcode0
)
9849 && operand_equal_p (arg00
, arg10
, 0)
9850 && ! HONOR_SNANS (TYPE_MODE (type
)))
9853 /* Optimize root(x)*root(y) as root(x*y). */
9854 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
9855 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
9856 return build_call_expr (rootfn
, 1, arg
);
9859 /* Optimize expN(x)*expN(y) as expN(x+y). */
9860 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
9862 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
9863 tree arg
= fold_build2 (PLUS_EXPR
, type
,
9864 CALL_EXPR_ARG (arg0
, 0),
9865 CALL_EXPR_ARG (arg1
, 0));
9866 return build_call_expr (expfn
, 1, arg
);
          /* Optimizations of pow(...)*pow(...).  */
          if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
              || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
              || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);

              /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
              if (operand_equal_p (arg01, arg11, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                  return build_call_expr (powfn, 2, arg, arg01);
                }

              /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
              if (operand_equal_p (arg00, arg10, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
                  return build_call_expr (powfn, 2, arg00, arg);
                }
            }
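          /* For illustration (hypothetical calls): pow (a, 3.0) *
             pow (b, 3.0) folds to pow (a * b, 3.0), and pow (a, y) *
             pow (a, z) folds to pow (a, y + z), matching the usual
             exponent laws; both change intermediate rounding and are
             therefore only done under unsafe math.  */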
          /* Optimize tan(x)*cos(x) as sin(x).  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
               || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

              if (sinfn != NULL_TREE)
                return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
            }
          /* Optimize x*pow(x,c) as pow(x,c+1).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              if (TREE_CODE (arg11) == REAL_CST
                  && !TREE_OVERFLOW (arg11)
                  && operand_equal_p (arg0, arg10, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg11);
                  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr (powfn, 2, arg0, arg);
                }
            }

          /* Optimize pow(x,c)*x as pow(x,c+1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr (powfn, 2, arg1, arg);
                }
            }
          /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
          if (! optimize_size
              && operand_equal_p (arg0, arg1, 0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree arg = build_real (type, dconst2);
                  return build_call_expr (powfn, 2, arg0, arg);
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst_type (type, -1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst_type (type, -1);
          return omit_one_operand (type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
          int width = TYPE_PRECISION (type);
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2 (BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
          if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                fold_build2 (BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo1 & ~lo2,
                                                                 hi1 & ~hi2)),
                                arg1);
        }
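      /* For illustration (hypothetical constants): with C1 = 0x19 and
         C2 = 0x0d, bits 0 and 3 of C1 are already forced by C2, so
         (X & 0x19) | 0x0d folds to (X & 0x10) | 0x0d; only the bit of
         C1 outside C2 still depends on X.  */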
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0)));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst_type (type, -1);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst_type (type, -1);
          return omit_one_operand (type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
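      /* For illustration (hypothetical constants): (X & 4) ^ (Y & 3) is
         treated as (X & 4) | (Y & 3); since the masks 4 and 3 share no
         bits, XOR and IOR of the masked values coincide, and the IOR
         form is subject to the additional folds above.  */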
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_build1 (BIT_NOT_EXPR, type, arg1));
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2 (EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));
      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (BIT_IOR_EXPR, type,
                            fold_build2 (BIT_AND_EXPR, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (BIT_AND_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }
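      /* For illustration: if c has type unsigned char (precision 8),
         then ((int) c & 0377) keeps all eight low bits, so the mask is a
         no-op and the whole expression folds back to (int) c.  */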
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0)));
        }
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;
      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands (type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands (type, r, arg0, arg1);
            }
        }
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2 (MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2 (MULT_EXPR, type,
                                      fold_convert (type, arg0), tem);
                }
            }
        }
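      /* For illustration: x / 2.0 becomes x * 0.5 whenever we are
         optimizing, because 0.5 is the exact reciprocal of 2.0 and
         exact_real_inverse succeeds.  x / 3.0 has no exact reciprocal
         and is rewritten only under -funsafe-math-optimizations.  */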
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2 (MULT_EXPR, type,
                            fold_build2 (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2 (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr (tanfn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                  return fold_build2 (RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr (cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr (cosfn, 1, arg00);
                      return fold_build2 (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr (powfn, 2, arg1, arg);
                }
            }
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
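          /* For illustration (hypothetical call): x / exp (y) becomes
             x * exp (-y), trading the division for a cheaper multiply;
             the rewritten call reuses the same expN built-in.  */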
          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert (type, negate_expr (arg11));
              arg1 = build_call_expr (powfn, 2, arg10, neg11);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2 (RSHIFT_EXPR, type,
                                  fold_convert (type, arg0), sh_cnt);
            }
        }
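      /* For illustration (hypothetical operands): with unsigned a and n,
         a / (4 << n) folds to a >> (n + 2), since log2(4) == 2 and the
         divisor is a power of 2 shifted left by n.  */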
      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                              negate_expr (arg1));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2 (code, type, negate_expr (arg0),
                              TREE_OPERAND (arg1, 0));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects.  */
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                       build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, arg0),
                                  fold_convert (type, mask));
            }
        }
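      /* For illustration (hypothetical operand): with unsigned x,
         x % 16 folds to x & 15, and x % (2 << n) folds to
         x & ((2 << n) - 1); both avoid a division instruction.  */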
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulos"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return build_int_cst (type, 0);
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
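      /* For illustration (hypothetical shifts): (a >> 3) >> 2 folds to
         a >> 5.  If the combined count reaches the type precision, the
         result is 0 for logical shifts, the count is clamped to
         precision - 1 for arithmetic right shifts, and it is reduced
         modulo the precision for rotates.  */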
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    GET_MODE_BITSIZE (TYPE_MODE (type)));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, arg0, tem);
        }
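      /* For illustration: on a 32-bit type, a rotate left by 5 is
         rewritten as a rotate right by 27, so only one rotate direction
         has to be handled from here on.  */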
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2 (code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2 (code, type, arg0, tem);
        }

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a01,
                                fold_build2 (code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type,
                                fold_build2 (code, type, a00, a10),
                                a01);
        }
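      /* For illustration: (a || b) && (a || c) folds to a || (b && c),
         so the common operand a is tested only once; the same factoring
         applies to the other and/or pairings handled above.  */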
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue (arg0);
          return non_lvalue (fold_convert (type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
      /* If this is an equality comparison of the address of a non-weak
         object against zero, then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && integer_zerop (arg1))
        return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert (TREE_TYPE (arg0), arg1),
                                      TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (arg1))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
                                         fold_convert (TREE_TYPE (arg0), arg1),
                                         TREE_OPERAND (arg0, 1)));
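      /* For illustration (hypothetical constants): X ^ 3 == 5 folds to
         X == 6, since 3 ^ 5 == 6 and XOR by a constant is its own
         inverse.  */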
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2 (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            return
              fold_build2 (code, type,
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
                                           arg01, TREE_OPERAND (arg00, 1)),
                                   fold_convert (TREE_TYPE (arg0),
                                                 integer_one_node)),
                           arg1);
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold_build2 (code, type,
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
                                           arg00, TREE_OPERAND (arg01, 1)),
                                   fold_convert (TREE_TYPE (arg0),
                                                 integer_one_node)),
                           arg1);
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
          tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 0)),
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 1)));

          return fold_build2 (code, type, newmod,
                              fold_convert (newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2 (code, type, tem, arg1);
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                    arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand (type,
                                         code == EQ_EXPR ? integer_one_node
                                                         : integer_zero_node,
                                         arg000);
            }
        }
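      /* For illustration (hypothetical constants, 32-bit type):
         ((x >> 3) & 4) != 0 folds to (x & 32) != 0, since 4 << 3 does
         not overflow; ((x >> 31) & 2) != 0 with signed x instead becomes
         x < 0, because the combined bit position would exceed the
         precision and the arithmetic shift replicates the sign bit.  */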
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert (TREE_TYPE (arg0),
                                                integer_zero_node));
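      /* For illustration (hypothetical constant): (flags & 8) == 8 folds
         to (flags & 8) != 0, which tests the single bit directly.  */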
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1 (BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
                 == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
              return fold_build2 (code, type, iref,
                                  build_int_cst (TREE_TYPE (iref), 0));
            }
        }
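      /* For illustration (hypothetical call): strlen (s) == 0 folds to
         *s == 0, replacing a call that scans the whole string with a
         single character load.  */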
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = lang_hooks.types.signed_type (itype);
                  arg00 = fold_convert (itype, arg00);
                }
              return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  type, arg00, build_int_cst (itype, 0));
            }
        }
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));
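
      /* Editor's illustration: XOR with a constant is invertible, so with
	 C1 == 5 and C2 == 3,

	   (x ^ 5) == 3   becomes   x == (5 ^ 3),  i.e.  x == 6.  */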
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg10),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg11),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg10),
					     arg00),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11),
					     arg00),
				build_int_cst (itype, 0));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2 (code, type,
				fold_build2 (BIT_XOR_EXPR, itype, arg00,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11)),
				arg10);
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
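
      /* Editor's illustration: for signed "int x" (overflow undefined),
	 "x + 10 > x" folds to 1 and "x + 10 <= x" folds to 0 here, each
	 optionally flagged by -Wstrict-overflow; no fold is attempted for
	 unsigned or wrapping types.  */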
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  if (code == GE_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	  if (code == LT_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	}
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && !TREE_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, arg0, arg1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (NE_EXPR, type, arg0, arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type, arg0, arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
		    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st0, arg0),
					build_int_cst (st1, 0));
		  }
	      }
	  }
      }
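
      /* Editor's illustration (32-bit int): for "unsigned int x", the test
	 "x > 0x7fffffffU" compares against the signed maximum and is
	 rewritten as "(int) x < 0", a simple sign-bit test.  */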
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_zero_node, arg0);
	}
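
      /* Editor's illustration: for "int x", "abs (x) >= 0" folds to 1 and
	 "abs (x) < 0" folds to 0; when the result relies on abs (INT_MIN)
	 not occurring, a -Wstrict-overflow diagnostic may be issued.  */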
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg1) == NOP_EXPR
	      || TREE_CODE (arg1) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;
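
      /* Editor's illustration of the shift rewrites above: for
	 "unsigned x, y",

	   x < (1U << y)    becomes   (x >> y) == 0
	   x >= (1U << y)   becomes   (x >> y) != 0

	 trading the shifted constant for a shift of X itself.  */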
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Returns the tree *TP
   points to if it is a LABEL_EXPR; otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns false if no such
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
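
/* Editor's sketch (hypothetical helper, not part of GCC): how a caller
   might consult contains_label_p before discarding a dead COND_EXPR arm,
   mirroring the use inside fold_ternary below.  */
#if 0
static tree
discard_dead_arm_sketch (tree type, tree live, tree dead)
{
  /* Only throw DEAD away when no outside GOTO_EXPR can jump into it.  */
  if (!TREE_SIDE_EFFECTS (dead) || !contains_label_p (dead))
    return fold_convert (type, live);
  return NULL_TREE;
}
#endif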
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;
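
      /* Editor's illustration of the four truth-value rewrites above:

	   a ? b : 0   becomes   a && b
	   a ? b : 1   becomes   !a || b
	   a ? 0 : b   becomes   !a && b
	   a ? 1 : b   becomes   a || b

	 (the middle two only when A can be inverted cheaply).  */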
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
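
/* Editor's sketch (hypothetical, not part of GCC): a caller asking the
   folder to simplify "x + 0".  fold_build2 returns the folded operand,
   or builds the PLUS_EXPR unchanged when no simplification applies.  */
#if 0
static tree
fold_plus_zero_sketch (tree x)
{
  tree zero = build_int_cst (TREE_TYPE (x), 0);
  return fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, zero);
}
#endif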
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and ARGLIST
   and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_list.  */

tree
fold_build_call_list (tree type, tree fn, tree arglist)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (arglist, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_list (type, fn, arglist);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (arglist, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (arglist, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_list_initializer (tree type, tree fn, tree arglist)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_list (type, fn, arglist);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
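
/* Editor's sketch (hypothetical, not part of GCC): folding "1.0 / 2.0"
   inside a static initializer.  The _initializer entry points clear
   flag_trapping_math and related flags around the fold, so the division
   is evaluated at compile time even when those flags would otherwise
   force fold to preserve it.  */
#if 0
static tree
fold_initializer_sketch (tree double_type)
{
  return fold_build2_initializer (RDIV_EXPR, double_type,
				  build_real (double_type, dconst1),
				  build_real (double_type, dconst2));
}
#endif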
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
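
/* Editor's sketch (hypothetical, not part of GCC): the motivating query
   from the comment above, checking that J * 8 is a multiple of 8.  */
#if 0
static int
multiple_of_p_sketch (tree j)
{
  tree eight = build_int_cst (sizetype, 8);
  tree prod = fold_build2 (MULT_EXPR, sizetype, j, eight);
  return multiple_of_p (sizetype, prod, eight);	/* yields 1 */
}
#endif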
13099 /* Return true if `t' is known to be non-negative. If the return
13100 value is based on the assumption that signed overflow is undefined,
13101 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13102 *STRICT_OVERFLOW_P. */
13105 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
13107 if (t
== error_mark_node
)
13110 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
13113 switch (TREE_CODE (t
))
13116 /* Query VRP to see if it has recorded any information about
13117 the range of this object. */
13118 return ssa_name_nonnegative_p (t
);
13121 /* We can't return 1 if flag_wrapv is set because
13122 ABS_EXPR<INT_MIN> = INT_MIN. */
13123 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
13125 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
13127 *strict_overflow_p
= true;
13133 return tree_int_cst_sgn (t
) >= 0;
13136 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
13139 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
13140 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13142 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13143 strict_overflow_p
));
13145 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13146 both unsigned and at least 2 bits shorter than the result. */
13147 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
13148 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
13149 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
13151 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
13152 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
13153 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
13154 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
13156 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
13157 TYPE_PRECISION (inner2
)) + 1;
13158 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
13164 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
13166 /* x * x for floating point x is always non-negative. */
13167 if (operand_equal_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1), 0))
13169 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13171 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13172 strict_overflow_p
));
13175 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13176 both unsigned and their total bits is shorter than the result. */
13177 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
13178 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
13179 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
13181 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
13182 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
13183 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
13184 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
13185 return TYPE_PRECISION (inner1
) + TYPE_PRECISION (inner2
)
13186 < TYPE_PRECISION (TREE_TYPE (t
));
13192 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13194 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13195 strict_overflow_p
));
13201 case TRUNC_DIV_EXPR
:
13202 case CEIL_DIV_EXPR
:
13203 case FLOOR_DIV_EXPR
:
13204 case ROUND_DIV_EXPR
:
13205 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13207 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13208 strict_overflow_p
));
13210 case TRUNC_MOD_EXPR
:
13211 case CEIL_MOD_EXPR
:
13212 case FLOOR_MOD_EXPR
:
13213 case ROUND_MOD_EXPR
:
13215 case NON_LVALUE_EXPR
:
13217 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13218 strict_overflow_p
);
13220 case COMPOUND_EXPR
:
13222 case GIMPLE_MODIFY_STMT
:
13223 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t
, 1),
13224 strict_overflow_p
);
13227 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
13228 strict_overflow_p
);
13231 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
13233 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
13234 strict_overflow_p
));
13238 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
13239 tree outer_type
= TREE_TYPE (t
);
13241 if (TREE_CODE (outer_type
) == REAL_TYPE
)
13243 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13244 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13245 strict_overflow_p
);
13246 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
13248 if (TYPE_UNSIGNED (inner_type
))
13250 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
13251 strict_overflow_p
);
13254 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
13256 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13257 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
,0),
13258 strict_overflow_p
);
13259 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
13260 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
13261 && TYPE_UNSIGNED (inner_type
);
13268 tree temp
= TARGET_EXPR_SLOT (t
);
13269 t
= TARGET_EXPR_INITIAL (t
);
13271 /* If the initializer is non-void, then it's a normal expression
13272 that will be assigned to the slot. */
13273 if (!VOID_TYPE_P (t
))
13274 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
13276 /* Otherwise, the initializer sets the slot in some way. One common
13277 way is an assignment statement at the end of the initializer. */
13280 if (TREE_CODE (t
) == BIND_EXPR
)
13281 t
= expr_last (BIND_EXPR_BODY (t
));
13282 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
13283 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
13284 t
= expr_last (TREE_OPERAND (t
, 0));
13285 else if (TREE_CODE (t
) == STATEMENT_LIST
)
13290 if ((TREE_CODE (t
) == MODIFY_EXPR
13291 || TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
13292 && GENERIC_TREE_OPERAND (t
, 0) == temp
)
13293 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t
, 1),
13294 strict_overflow_p
);
13301 tree fndecl
= get_callee_fndecl (t
);
13302 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
13303 switch (DECL_FUNCTION_CODE (fndecl
))
13305 CASE_FLT_FN (BUILT_IN_ACOS
):
13306 CASE_FLT_FN (BUILT_IN_ACOSH
):
13307 CASE_FLT_FN (BUILT_IN_CABS
):
13308 CASE_FLT_FN (BUILT_IN_COSH
):
13309 CASE_FLT_FN (BUILT_IN_ERFC
):
13310 CASE_FLT_FN (BUILT_IN_EXP
):
13311 CASE_FLT_FN (BUILT_IN_EXP10
):
13312 CASE_FLT_FN (BUILT_IN_EXP2
):
13313 CASE_FLT_FN (BUILT_IN_FABS
):
13314 CASE_FLT_FN (BUILT_IN_FDIM
):
13315 CASE_FLT_FN (BUILT_IN_HYPOT
):
13316 CASE_FLT_FN (BUILT_IN_POW10
):
13317 CASE_INT_FN (BUILT_IN_FFS
):
13318 CASE_INT_FN (BUILT_IN_PARITY
):
13319 CASE_INT_FN (BUILT_IN_POPCOUNT
):
13320 case BUILT_IN_BSWAP32
:
13321 case BUILT_IN_BSWAP64
:
13325 CASE_FLT_FN (BUILT_IN_SQRT
):
13326 /* sqrt(-0.0) is -0.0. */
13327 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t
))))
13329 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 0),
13330 strict_overflow_p
);
13332 CASE_FLT_FN (BUILT_IN_ASINH
):
13333 CASE_FLT_FN (BUILT_IN_ATAN
):
13334 CASE_FLT_FN (BUILT_IN_ATANH
):
13335 CASE_FLT_FN (BUILT_IN_CBRT
):
13336 CASE_FLT_FN (BUILT_IN_CEIL
):
13337 CASE_FLT_FN (BUILT_IN_ERF
):
13338 CASE_FLT_FN (BUILT_IN_EXPM1
):
13339 CASE_FLT_FN (BUILT_IN_FLOOR
):
13340 CASE_FLT_FN (BUILT_IN_FMOD
):
13341 CASE_FLT_FN (BUILT_IN_FREXP
):
13342 CASE_FLT_FN (BUILT_IN_LCEIL
):
13343 CASE_FLT_FN (BUILT_IN_LDEXP
):
13344 CASE_FLT_FN (BUILT_IN_LFLOOR
):
13345 CASE_FLT_FN (BUILT_IN_LLCEIL
):
13346 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
13347 CASE_FLT_FN (BUILT_IN_LLRINT
):
13348 CASE_FLT_FN (BUILT_IN_LLROUND
):
13349 CASE_FLT_FN (BUILT_IN_LRINT
):
13350 CASE_FLT_FN (BUILT_IN_LROUND
):
13351 CASE_FLT_FN (BUILT_IN_MODF
):
13352 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
13353 CASE_FLT_FN (BUILT_IN_RINT
):
13354 CASE_FLT_FN (BUILT_IN_ROUND
):
13355 CASE_FLT_FN (BUILT_IN_SCALB
):
13356 CASE_FLT_FN (BUILT_IN_SCALBLN
):
13357 CASE_FLT_FN (BUILT_IN_SCALBN
):
13358 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
13359 CASE_FLT_FN (BUILT_IN_SINH
):
13360 CASE_FLT_FN (BUILT_IN_TANH
):
13361 CASE_FLT_FN (BUILT_IN_TRUNC
):
13362 /* True if the 1st argument is nonnegative. */
13363 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 0),
13364 strict_overflow_p
);
13366 CASE_FLT_FN (BUILT_IN_FMAX
):
13367 /* True if the 1st OR 2nd arguments are nonnegative. */
13368 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 0),
13370 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 1),
13371 strict_overflow_p
)));
13373 CASE_FLT_FN (BUILT_IN_FMIN
):
13374 /* True if the 1st AND 2nd arguments are nonnegative. */
13375 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 0),
13377 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 1),
13378 strict_overflow_p
)));
13380 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
13381 /* True if the 2nd argument is nonnegative. */
13382 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t
, 1),
13383 strict_overflow_p
);
            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer.  */
              if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
                {
                  tree arg1 = CALL_EXPR_ARG (t, 1);
                  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
                    return true;
                }
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                    strict_overflow_p);

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the 1st argument is nonnegative or the second
                 argument is an even integer valued real.  */
              if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
                  n = real_to_integer (&c);
                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n,
                                         n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }
              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
                                                    strict_overflow_p);
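
            /* Example (illustrative, not from the original sources):
               under the two rules above, pow (x, 2.0) and powi (x, 4)
               are nonnegative for any X, because the exponent is an
               even integer (valued real), while pow (x, 3.0) is
               nonnegative only when X itself can be shown to be
               nonnegative.  */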
            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return true;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
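
/* Caller-side sketch of the _warnv_ protocol (hypothetical code, for
   illustration only -- EXPR stands for any tree the caller wants to
   test):

     bool ovf = false;
     if (tree_expr_nonnegative_warnv_p (expr, &ovf))
       {
         if (ovf)
           fold_overflow_warning ("...", WARN_STRICT_OVERFLOW_MISC);
         ... rely on EXPR being nonnegative ...
       }

   The warning is only emitted once the caller commits to using the
   fact, which is exactly what the wrapper above does.  */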
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  bool sub_strict_overflow_p;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p));
        }
      break;
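
    /* Worked example (illustrative): if X > 0 and Y >= 0 in an N-bit
       signed type, the mathematical sum X + Y lies in [1, 2**N - 2],
       so even when the addition wraps modulo 2**N on a two's-complement
       machine it cannot produce zero.  That is why the code above may
       return true without setting *STRICT_OVERFLOW_P.  */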
    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                              strict_overflow_p));
      }
      break;
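
    /* Example (illustrative): the precision test above accepts widening
       conversions such as (int) c for a nonzero char C, but rejects
       narrowing ones: (char) i with i == 0x100 truncates to 0, so a
       narrower target type cannot preserve the nonzero property.  */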
    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }
    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;
    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;
    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
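
/* Usage sketch (hypothetical caller, for illustration): this is a
   "fold or say you cannot" primitive.

     tree t = fold_binary_to_constant (PLUS_EXPR, type, op0, op1);
     if (t == NULL_TREE)
       ... the sum did not simplify to a constant; keep the
           expression unfolded ...  */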
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
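
/* Example (illustrative): for the GENERIC form of "hello"[1], EXP is an
   ARRAY_REF of a STRING_CST with index 1; assuming a single-byte
   character mode, the checks above succeed and the reference folds to
   the character constant 'e', converted to TREE_TYPE (exp).  */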
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
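
/* Note (illustrative): unlike negation, bitwise complement generates no
   carries between words, so ~X is computed exactly by complementing the
   low and high HOST_WIDE_INT halves independently; e.g. ~0 folds to the
   all-ones double-word constant (-1 when the type is signed).  */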
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}
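
/* Example (illustrative): if C0 is a NaN, 'c0 < 1.0' cannot be folded
   when flag_trapping_math is set, since the runtime comparison would
   raise an invalid-operation exception; 'c0 != c1' by contrast folds to
   true unconditionally, matching the IEEE rule that a NaN compares
   unequal to everything, including itself.  */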
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for an EXPR which doesn't have
   side effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right hand side of the modify expression inside the
     return, has side effects.  If either doesn't, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the left
     hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
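
/* Example (illustrative, assuming an 8-byte double): for
   '_Complex double z', the expression '((double *) &z)[1]' reaches the
   PLUS_EXPR case above as *(&z + 8); since 8 equals
   TYPE_SIZE_UNIT (double), it simplifies to '__imag__ z'.  */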
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;

  return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
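
/* Worked example (illustrative): with a power-of-two divisor,
   round_up (37, 8) becomes (37 + 7) & -8 = 40 in the non-constant
   branch; the INTEGER_CST branch reaches the same value as
   (37 & ~7) + 8, recording overflow if the increment wraps the
   double-word representation.  */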
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
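
/* Worked example (illustrative): rounding down needs no addition, so
   with a power-of-two divisor round_down (37, 8) is simply
   37 & -8 = 32; -8 is the mask with the low three bits clear.  */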
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
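
/* Example (illustrative, assuming a 4-byte int): for 'int a[10]', the
   addresses &a[3] and &a[1] share the core '&a'; their bit positions
   are 96 and 32, both multiples of BITS_PER_UNIT, so the function
   returns true with *DIFF == (96 - 32) / 8 == 8 bytes.  */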
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);