/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
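
/* Editor's illustration (not part of the original file): how a caller can
   detect signed overflow of an addition with OVERFLOW_SUM_SIGN.  A minimal
   sketch, assuming two's complement, wrapping host arithmetic.  */
#if 0
{
  /* The largest positive HOST_WIDE_INT.  */
  HOST_WIDE_INT a = (HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) -1) >> 1);
  HOST_WIDE_INT b = 1;
  HOST_WIDE_INT sum = a + b;  /* wraps to the minimum value */

  /* A and B have the same sign but SUM differs, so overflow occurred.  */
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));
}
#endif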
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
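
/* Editor's illustration (not part of the original file): the invariant the
   three macros above maintain for any unsigned HOST_WIDE_INT value.  */
#if 0
{
  unsigned HOST_WIDE_INT x = 0x12345678;

  /* Every value decomposes into a low and a high half-word digit.  */
  gcc_assert (x == LOWPART (x) + HIGHPART (x) * BASE);
}
#endif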
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
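
/* Editor's illustration (not part of the original file): encode and decode
   are inverses, so a two-word integer survives a round trip through the
   four half-word digits.  */
#if 0
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0xdeadbeef, low2;
  HOST_WIDE_INT hi = 42, hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  gcc_assert (low == low2 && hi == hi2);  /* lossless round trip */
}
#endif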
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
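
/* Editor's usage sketch (hypothetical values, not original code): forcing
   the double-int 0:256 into an 8-bit unsigned type truncates it to 0 and,
   because argument and result differ, marks the node with TREE_OVERFLOW.  */
#if 0
{
  tree t = force_fit_type_double (unsigned_char_type_node, 256, 0,
                                  /* overflowable */ -1,
                                  /* overflowed */ false);
  gcc_assert (integer_zerop (t) && TREE_OVERFLOW (t));
}
#endif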
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
                       + (unsigned HOST_WIDE_INT) h2
                       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
            || (h == h1
                && l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
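
/* Editor's illustration (not part of the original file): small products fit
   in the low two words, so no overflow is reported in the unsigned case.  */
#if 0
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  gcc_assert (!mul_double_with_sign (3, 0, 5, 0, &lv, &hv, true));
  gcc_assert (lv == 15 && hv == 0);
}
#endif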
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
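
/* Editor's illustration (not part of the original file): a left shift by a
   negative count is routed through rshift_double, so the two calls below
   compute the same result.  */
#if 0
{
  unsigned HOST_WIDE_INT l1, l2;
  HOST_WIDE_INT h1, h2;

  lshift_double (0x100, 0, -4, 2 * HOST_BITS_PER_WIDE_INT, &l1, &h1, 0);
  rshift_double (0x100, 0, 4, 2 * HOST_BITS_PER_WIDE_INT, &l2, &h2, 0);
  gcc_assert (l1 == l2 && h1 == h2 && l1 == 0x10);
}
#endif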
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
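
/* Editor's illustration (hypothetical operands, not original code): the
   rounding codes differ only in how they adjust the truncated quotient.
   For -7 / 2: TRUNC gives -3 (toward zero), FLOOR gives -4 (toward negative
   infinity), CEIL gives -3 (toward positive infinity), and ROUND gives -4
   (nearest, with the 2*rem >= den test adjusting ties away from zero).  */
#if 0
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);	/* quo -3, rem -1 */
  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);	/* quo -4, rem  1 */
}
#endif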
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}
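
/* Editor's usage sketch (hypothetical constants, not original code): exact
   divisions fold to a constant, inexact ones return NULL_TREE.  */
#if 0
{
  tree six = build_int_cst (integer_type_node, 6);
  tree three = build_int_cst (integer_type_node, 3);
  tree four = build_int_cst (integer_type_node, 4);

  gcc_assert (div_if_zero_remainder (EXACT_DIV_EXPR, six, three));  /* 2 */
  gcc_assert (!div_if_zero_remainder (EXACT_DIV_EXPR, six, four));  /* 6 % 4 != 0 */
}
#endif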
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
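
/* Editor's sketch of the typical caller pattern (the trees TYPE, OP0 and
   OP1, the statement STMT and the flag RESULT_IS_USED are hypothetical):
   speculative folding brackets its work with defer/undefer so a warning
   about undefined signed overflow is only issued if the result is used.  */
#if 0
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold_binary (PLUS_EXPR, type, op0, op1);  /* may record a warning */
  fold_undefer_overflow_warnings (folded != NULL_TREE && result_is_used,
                                  stmt, WARN_STRICT_OVERFLOW_MISC);
}
#endif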
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
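
/* Editor's illustration (not original code): for a signed type, the minimum
   value is the one constant whose negation overflows, so it is rejected.  */
#if 0
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);

  gcc_assert (!may_negate_without_overflow_p (int_min));
}
#endif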
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
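
/* Editor's decomposition sketch (the tree IN standing for "x + 3" is
   hypothetical, not original code): the literal lands in *LITP, *CONP
   stays null, and the variable part is returned.  */
#if 0
{
  tree con, lit, minus_lit;
  tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit,
                         /* negate_p */ 0);
  /* Now var == x, lit == 3, and con and minus_lit are NULL_TREE.  */
}
#endif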
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
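
/* Editor's usage sketch (hypothetical constants, not original code):
   combining two INTEGER_CSTs yields a new folded constant.  */
#if 0
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree five = int_const_binop (PLUS_EXPR, two, three, 0);

  gcc_assert (tree_low_cst (five, 0) == 5);
}
#endif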
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2, notrunc),
                               const_binop (MULT_EXPR, i2, i2, notrunc),
                               notrunc);
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2, notrunc),
                               const_binop (MULT_EXPR, i1, i2, notrunc),
                               notrunc);
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2, notrunc),
                               const_binop (MULT_EXPR, r1, i2, notrunc),
                               notrunc);

              real = const_binop (code, t1, magsquared, notrunc);
              imag = const_binop (code, t2, magsquared, notrunc);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2, notrunc);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio,
                                                       notrunc),
                                          notrunc);
                  real = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, i1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2, notrunc);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio,
                                                       notrunc),
                                          notrunc);

                  real = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, r1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0 */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2, notrunc);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
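
/* The fast paths above rely on arithmetic identities (0 + x == x,
   x - 0 == x, 1 * x == x) to avoid the general constant-folding call.
   A self-contained sketch of the same dispatch on plain longs, with
   hypothetical op codes of its own (illustrative only):  */
#if 0
enum sketch_code { SK_PLUS, SK_MINUS, SK_MULT };

static long
sketch_size_binop (enum sketch_code code, long a, long b)
{
  /* Cheap identities first, mirroring size_binop_loc.  */
  if (code == SK_PLUS && a == 0)
    return b;
  if ((code == SK_PLUS || code == SK_MINUS) && b == 0)
    return a;
  if (code == SK_MULT && a == 1)
    return b;

  /* General case.  */
  switch (code)
    {
    case SK_PLUS:  return a + b;
    case SK_MINUS: return a - b;
    case SK_MULT:  return a * b;
    }
  return 0;
}
#endif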

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
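
/* The branchy tail above avoids an overflowing unsigned subtraction by
   always subtracting the smaller constant from the larger and negating
   when necessary.  The same idea on plain unsigned values (illustrative
   only; assumes the difference fits in the signed type, as it does for
   the sizetype constants handled here):  */
#if 0
static long
sketch_size_diffop (unsigned long a, unsigned long b)
{
  if (a == b)
    return 0;
  else if (b < a)
    return (long) (a - b);	/* Known not to overflow here.  */
  else
    return -(long) (b - a);	/* Likewise.  */
}
#endif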

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				???  sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
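
/* A stand-alone model of the saturating conversion implemented above
   (NaN -> 0, out-of-range values clamp to the type's extremes);
   illustrative only, using double -> int instead of tree constants:  */
#if 0
#include <limits.h>
#include <math.h>

static int
sketch_fix_trunc (double x)
{
  double r = trunc (x);		/* FIX_TRUNC_EXPR: round toward zero.  */
  if (isnan (r))
    return 0;			/* NaN maps to zero (overflow noted).  */
  if (r < (double) INT_MIN)
    return INT_MIN;		/* Saturate at the lower bound.  */
  if (r > (double) INT_MAX)
    return INT_MAX;		/* Saturate at the upper bound.  */
  return (int) r;
}
#endif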

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
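
/* The double_int manipulation above is the wide-integer version of a
   simple idea: shift out the fractional bits, and for a negative value
   with a nonzero fraction add 1 so the result rounds toward zero rather
   than toward minus infinity.  A narrow Q-format sketch (illustrative
   only; assumes the usual arithmetic right shift on negative ints and
   0 < fbits < 31):  */
#if 0
static int
sketch_fixed_to_int (int value, int fbits)
{
  int truncated = value >> fbits;	/* Rounds toward -infinity.  */
  if (value < 0 && (value & ((1 << fbits) - 1)) != 0)
    truncated += 1;			/* Nonzero fraction: fix toward 0.  */
  return truncated;
}
#endif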

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
	goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  SET_EXPR_LOCATION (x, loc);
  return x;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);
  protected_set_expr_location (x, loc);
  return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
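
/* Why plain inversion is unsafe with NaNs: !(a < b) is not a >= b once
   either operand can be NaN, because every ordered comparison involving
   NaN is false.  A stand-alone check of the UNGE inverse chosen above
   (illustrative only; uses C99 isunordered):  */
#if 0
#include <assert.h>
#include <math.h>

static void
sketch_nan_inversion (void)
{
  double a = nan (""), b = 1.0;
  /* Both the comparison and its naive "inverse" are false for NaN,
     so LT must invert to UNGE, not GE.  */
  assert (!(a < b));
  assert (!(a >= b));			/* GE is NOT the inverse of LT.  */
  assert (isunordered (a, b) || a >= b);  /* UNGE: the correct inverse.  */
}
#endif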

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
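
/* The bit-based encoding makes combining comparisons a pure bit
   operation: with LT = 1, EQ = 2, GT = 4, UNORD = 8, the code for
   x <= y is LT|EQ, and ANDing/ORing two codes yields the code of the
   conjunction/disjunction.  A stand-alone rendering of the trick
   (illustrative only; the SK_* macros mirror enum comparison_code):  */
#if 0
#include <assert.h>

#define SK_LT    1
#define SK_EQ    2
#define SK_GT    4
#define SK_UNORD 8
#define SK_LE    (SK_LT | SK_EQ)
#define SK_GE    (SK_GT | SK_EQ)
#define SK_NE    (SK_LT | SK_GT)

static void
sketch_combine (void)
{
  /* (x < y) || (x == y)  ==>  x <= y.  */
  assert ((SK_LT | SK_EQ) == SK_LE);
  /* (x <= y) && (x >= y)  ==>  x == y.  */
  assert ((SK_LE & SK_GE) == SK_EQ);
  /* (x < y) && (x > y)  ==>  false, encoded as 0 (COMPCODE_FALSE).  */
  assert ((SK_LT & SK_GT) == 0);
}
#endif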

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto omit_one_operand_exit;
    }

  return non_lvalue_loc (loc, t);

 omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto pedantic_omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto pedantic_omit_one_operand_exit;
    }

  return pedantic_non_lvalue_loc (loc, t);

 pedantic_omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    {
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
      SET_EXPR_LOCATION (t, loc);
    }
  if (TREE_SIDE_EFFECTS (omitted1))
    {
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	t = build2 (TRUTH_XOR_EXPR, type,
		    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
	loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
	if (loc1 == UNKNOWN_LOCATION)
	  loc1 = loc;
	if (loc2 == UNKNOWN_LOCATION)
	  loc2 = loc;

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    VOID_TYPE_P (TREE_TYPE (arg1))
		    ? arg1 : invert_truthvalue_loc (loc1, arg1),
		    VOID_TYPE_P (TREE_TYPE (arg2))
		    ? arg2 : invert_truthvalue_loc (loc2, arg2));
	break;
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
		  TREE_OPERAND (arg, 0),
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	{
	  t = build1 (TRUTH_NOT_EXPR, type, arg);
	  break;
	}

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      return NULL_TREE;
    }

  SET_EXPR_LOCATION (t, loc);
  return t;
}
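
/* The TRUTH_AND/TRUTH_OR cases above are De Morgan's laws applied
   structurally: !(a && b) becomes !a || !b, and !(a || b) becomes
   !a && !b.  A stand-alone exhaustive check (illustrative only):  */
#if 0
#include <assert.h>

static void
sketch_de_morgan (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	assert (!(a && b) == (!a || !b));	/* TRUTH_AND_EXPR case.  */
	assert (!(a || b) == (!a && !b));	/* TRUTH_OR_EXPR case.  */
      }
}
#endif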

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    {
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
      SET_EXPR_LOCATION (tem, loc);
    }

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
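
/* The identity exploited above holds bit-wise for any values; an
   exhaustive check over small operands makes that concrete
   (illustrative only):  */
#if 0
#include <assert.h>

static void
sketch_distribute (void)
{
  unsigned a, b, c;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
	{
	  assert (((a | b) & (a | c)) == (a | (b & c)));
	  assert (((a & b) | (a & c)) == (a & (b | c)));
	}
}
#endif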

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
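
/* Why the rewrite above is flagged unsafe: FP division does not
   distribute exactly, so (a/c) + (b/c) and (a+b)/c can round
   differently and need not be bit-identical, which is why callers only
   apply it under unsafe-math optimization flags.  A stand-alone
   comparison of the two forms (illustrative only):  */
#if 0
#include <stdio.h>

static void
sketch_real_division (void)
{
  double a = 0.1, b = 0.2, c = 3.0;
  double lhs = a / c + b / c;	/* Two divisions, two roundings.  */
  double rhs = (a + b) / c;	/* One division; may round differently.  */
  printf ("identical: %d\n", lhs == rhs);
}
#endif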

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
		   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  lhs = build2 (code, compare_type,
		build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
  SET_EXPR_LOCATION (lhs, loc);

  return lhs;
}
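
/* What the transformation above ultimately emits: load a whole word,
   AND it with a mask that isolates the field, and compare against the
   constant shifted into position, avoiding the extract-and-shift
   sequence.  A plain-C model (illustrative only; little-endian bit
   numbering and bitpos + size <= 32 assumed):  */
#if 0
/* Compare the unsigned bitfield of SIZE bits at BITPOS inside WORD
   against the constant VAL without extracting the field first.  */
static int
sketch_bit_field_eq (unsigned word, int bitpos, int size, unsigned val)
{
  unsigned mask = ((size < 32 ? (1u << size) : 0u) - 1u) << bitpos;
  return (word & mask) == ((val << bitpos) & mask);
}
#endif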

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}
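
/* Note on the helper above: TMASK is deliberately built in the signed
   variant of TYPE, so the RSHIFT_EXPR computed by const_binop is an
   arithmetic right shift, and the final test is an exact word-by-word
   comparison of the two constants via tree_int_cst_equal.  */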

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
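
/* For instance (illustrative, assuming a 32-bit type and
   HOST_BITS_PER_WIDE_INT >= 32): the code above sets lo == 0x80000000 and
   mask_lo == 0xffffffff, so a VAL of INT_MIN -- whose low word is stored
   sign-extended -- still matches once the mask is applied.  */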

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
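
/* To make the notation concrete (illustrative): "X >= 2 && X <= 5" is
   written + [2, 5]; its inverse "X < 2 || X > 5" is - [2, 5]; "X > 10"
   is - [-, 10]; and a vacuously true test is + [-, -].  */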

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
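
/* Example of the infinity handling above (illustrative): comparing an
   omitted upper bound (sgn0 == 1) against an omitted lower bound
   (sgn1 == -1) with GT_EXPR yields constant true, matching the intuition
   that the type's maximum exceeds its minimum; two omitted upper bounds
   compare equal under EQ_EXPR.  */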

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  SET_EXPR_LOCATION (exp, loc);
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	CASE_CONVERT: case NON_LVALUE_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert_loc (loc, arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert_loc (loc, arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
						 fold_convert_loc (loc,
								   arg0_type,
								   high_positive),
						 build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
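
/* Worked example (illustrative): for "(unsigned) x - 2 <= 3", the LE_EXPR
   case yields + [-, 3], the unsigned test intersects that with [0, -] to
   give + [0, 3], and the MINUS_EXPR case shifts both bounds up by 2; the
   function returns "x" with *PIN_P == 1, *PLOW == 2 and *PHIGH == 5.  */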

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert_loc (loc, sizetype, low);
	  low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
	  return build_range_check (loc, type,
				    fold_build2_loc (loc, POINTER_PLUS_EXPR,
						     etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
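
/* Worked example (illustrative): a range check of x against + [2, 5] in a
   signed type whose arithmetic does not wrap goes through the unsigned
   conversion above and comes back from the recursive call as the
   canonical "(unsigned) x - 2 <= 3".  */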

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
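
/* Worked example (illustrative): merging + [2, 5] with + [4, 9] falls into
   the "in0_p && in1_p" case with overlap and no subset, producing their
   intersection + [4, 5], i.e. from low1 to high0.  */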

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc,
					  fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc,
					  fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type,
								tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type,
								tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type,
							      arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type,
							      arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type,
							      tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type,
							      tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type,
							      tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type,
							      tem));
	  }
	break;

      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
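
/* Example of the constant cases above (illustrative): "x < 48 ? x : 47"
   has C1 == C2 + 1, so it is recognized as MIN_EXPR <x, 47>, recovering a
   min() that an earlier transformation had turned into a COND_EXPR.  */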

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	{
	  tem = build2 (code == TRUTH_ANDIF_EXPR
			? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			type, op0, op1);
	  SET_EXPR_LOCATION (tem, loc);
	  return tem;
	}

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      tem = build2 (code == TRUTH_ANDIF_EXPR
			    ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			    type, lhs, rhs);
	      SET_EXPR_LOCATION (tem, loc);
	      return tem;
	    }
	}
    }

  return 0;
}
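
/* For example (illustrative): "x >= 2 && x <= 5" arrives here as a
   TRUTH_ANDIF_EXPR whose operands both test x; make_range produces
   + [2, -] and + [-, 5], merge_ranges combines them into + [2, 5], and
   build_range_check returns "(unsigned) (x - 2) <= 3".  */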

/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask),
			0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type,
		       const_binop (BIT_XOR_EXPR, c, temp, 0));
}
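
/* Worked example (illustrative): with P == 3, a 32-bit mode and C == 5
   (bit pattern 101, sign bit set), TEMP isolates the sign bit, moves it
   to bit 31, and the arithmetic shift right by 32 - 3 - 1 == 28 smears
   it across bits 3..31; XORing with C gives 0xfffffffd, the field value
   sign-extended to the full width.  */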

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (location_t loc, enum tree_code code, tree truth_type,
	      tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	{
	  result = build2 (NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
	  goto fold_truthop_exit;
	}

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	{
	  result = build2 (EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
	  goto fold_truthop_exit;
	}

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    {
	      result = build2 (code, truth_type, lhs, rhs);
	      goto fold_truthop_exit;
	    }
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  result = build2 (wanted_code, truth_type, lhs, rhs);
	  goto fold_truthop_exit;
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  result = build2 (wanted_code, truth_type, lhs, rhs);
	  goto fold_truthop_exit;
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    {
      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
      SET_EXPR_LOCATION (result, loc);
    }

  result = build2 (wanted_code, truth_type, result,
		   const_binop (BIT_IOR_EXPR, l_const, r_const, 0));

 fold_truthop_exit:
  SET_EXPR_LOCATION (result, loc);
  return result;
}
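
/* Example of the merged-constant case above (illustrative): for two
   adjacent bit-fields, "p->a == 2 && p->b == 4" decodes both references
   to the same containing word, IORs the two masks and the two shifted
   constants together, and the code above emits a single EQ_EXPR of the
   masked word against the combined constant.  */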
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      gcc_unreachable ();
    }
}
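/* Worked example (editor's addition): for "MAX (x, 3) > 2" we have
   minmax_const == 3 and comp_const == 2, so consts_equal and consts_lt
   are both false; the GT_EXPR case above therefore returns
   integer_one_node, i.e. the comparison is always true, exactly like
   the documented MAX (X, 0) > -1 case.  */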
/* T is an integer expression that is being multiplied, divided, or
   taken a modulus (CODE says which and what kind of divide or modulus)
   by a constant C.  See if we can eliminate that operation by folding
   it with other operations already in T.  WIDE_TYPE, if non-null, is a
   type that should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
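/* Source-level illustration of the comment above (editor's addition;
   "x" and "y" stand for arbitrary signed operands whose overflow is
   undefined):

       (x * 8 + y * 16) / 4   ==>   x * 2 + y * 4

   extract_muldiv distributes the division over the PLUS_EXPR and then
   cancels it against each inner multiplication.  */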
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the type it is
		 converted to has not, we cannot do the operation in the
		 inner type as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return fold_convert (ctype, t1);
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.
	     ??? Until we can properly mark individual operations as
	     not overflowing we need to treat sizetype special here as
	     stor-layout relies on this optimization to make
	     DECL_FIELD_BIT_OFFSET always a constant.  */
	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = int_const_binop (MULT_EXPR,
					 fold_convert (ctype, op1),
					 fold_convert (ctype, c), 1))
	  && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
					       TREE_INT_CST_HIGH (t1),
					       (TYPE_UNSIGNED (ctype)
						&& tcode != MULT_EXPR) ? -1 : 1,
					       TREE_OVERFLOW (t1)))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
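/* Example of the "cancel" case above (editor's addition): for
   "(x * 12) / 4" with signed x we have tcode == MULT_EXPR, code ==
   TRUNC_DIV_EXPR and 12 % 4 == 0, so the first branch returns
   "x * (12 / 4)", i.e. "x * 3", and sets *strict_overflow_p because
   the rewrite is only valid if the original multiply cannot wrap.  */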
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
  return fold_convert_loc (loc, type, test);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
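/* Numeric illustration (editor's addition): in the default
   round-to-nearest mode (-0.0) + 0.0 yields +0.0, so "x + 0.0" is not
   an identity when x might be -0.0, whereas "x - 0.0" preserves both
   zeros.  Under rounding towards -infinity the roles flip: there
   0.0 - 0.0 is -0.0, which is why the final test also requires
   !HONOR_SIGN_DEPENDENT_ROUNDING.  */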
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, code, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
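/* Worked example (editor's addition): for double "x < +Inf" the
   LT_EXPR case rewrites to "x <= DBL_MAX".  For "x > -Inf", neg is
   true and the comparison is first swapped into the LT_EXPR form,
   after which real_maxval produces -DBL_MAX and the result is
   "x >= -DBL_MAX".  */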
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				-1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
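/* Worked example (editor's addition): for signed "x / 4 == 3" we get
   prod == 12 and tmp == 3, hence [lo, hi] == [12, 15]; the EQ_EXPR
   case then emits the equivalent range check "x >= 12 && x <= 15" via
   build_range_check.  */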
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
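/* Worked example (editor's addition): "(x & 8) != 0" has bitnum == 3
   and becomes "((unsigned) x >> 3) & 1"; for the EQ_EXPR flavor,
   "(x & 8) == 0", an extra XOR with 1 inverts the extracted bit before
   the final AND.  */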
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
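/* Worked example (editor's addition): for "(int) c == 100" where c has
   type signed char, the constant 100 fits in [-128, 127], so the first
   branch above narrows the test to "c == 100" and the widening cast
   disappears.  */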
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1), 0,
				  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /*  Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
					   fold_convert_loc (loc, itype,
							     TREE_OPERAND (pos, 1)),
					   fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
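/* Worked examples (editor's addition): "x*6 + y*6" matches the
   operand_equal_p chain with same == 6 and folds to "(x + y) * 6";
   "i*4 + j*2" has no identical multiplicand, but 2 is a power of two
   dividing 4, so the fallback path rewrites it to "(i*2 + j) * 2".  */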
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	hi |= (unsigned HOST_WIDE_INT) value
	      << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
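/* For illustration (a hypothetical use, not part of the original
   source): together with native_encode_expr this implements a
   round-trip through the target byte representation, e.g.

     unsigned char buf[4];
     if (native_encode_expr (some_real_cst, buf, 4) == 4)
       t = native_interpret_expr (integer_type_node, buf, 4);

   which, assuming a 32-bit int on the target, reinterprets the bits
   of a float constant as an integer constant.  */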
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
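/* For illustration (an example, not part of the original source): on an
   IEEE-754 target where int and float are both 32 bits, folding
   VIEW_CONVERT_EXPR<int>(1.0f) through this routine produces the
   INTEGER_CST 0x3f800000, since the float is first encoded into target
   bytes and those bytes are then re-read as an integer.  */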
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	{
	  t = build1 (NOP_EXPR, ptrtype, t);
	  SET_EXPR_LOCATION (t, loc);
	}
    }
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    {
      t = build1 (ADDR_EXPR, ptrtype, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
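/* For illustration (an example, not part of the original source): if T
   is the INDIRECT_REF *p, the folded address &*p is simply p (possibly
   wrapped in a NOP_EXPR cast when p's type differs from PTRTYPE), so no
   ADDR_EXPR node is built at all for that case.  */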
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operand.  So strip conversions as much as possible.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    {
	      tem = build1 (code, type,
			    build3 (COND_EXPR,
				    TREE_TYPE (TREE_OPERAND
					       (TREE_OPERAND (tem, 1), 0)),
				    TREE_OPERAND (tem, 0),
				    TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				    TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	      SET_EXPR_LOCATION (tem, loc);
	    }
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold_build3_loc (loc, COND_EXPR, type, arg0,
				    fold_build1_loc (loc, code, type,
						     integer_one_node),
				    fold_build1_loc (loc, code, type,
						     integer_zero_node));
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
	 new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, TREE_CODE (op0), type,
				TREE_OPERAND (op0, 0),
				TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}
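
      /* For illustration (examples, not part of the original source):
	 with a 32-bit int and 64-bit long, (int)(long)i for an int I
	 drops the widening cast by the second rule above and then folds
	 to plain I; and (long long)(int)(unsigned short)x keeps only a
	 single zero-extension by the sign-of-zero-extension rule, since
	 the intermediate sign-extension of a non-negative value cannot
	 change it.  */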
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type.  */
	  if (! offset && bitpos == 0
	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  SET_EXPR_LOCATION (tem, loc);
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
					   TREE_INT_CST_HIGH (and1), 0,
					   TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
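
      /* For illustration (an example, not part of the original source):
	 for a signed int X, the cast (unsigned char)(X & 0x7f) folds to
	 (unsigned char) X & 0x7f here, because the mask clears the sign
	 bit and so the narrowing cast commutes with the AND.  */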
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away. Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build2_loc (loc,
				  TREE_CODE (arg0), type,
				  fold_convert_loc (loc, type, arg00),
				  fold_convert_loc (loc, sizetype, arg01));
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;
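
      /* For illustration (examples, not part of the original source):
	 the two's-complement identities used above are ~(-A) == A - 1
	 and ~(A - 1) == -A, e.g. for A == 5 both sides of the first
	 identity evaluate to 4 and both sides of the second to -5.  */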
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
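/* For illustration (an example, not part of the original source): the
   identity MIN (MAX (a, b), b) == b holds because the MAX is at least b,
   so clamping it back down with MIN against b always yields b; e.g. for
   a == 3, b == 7: MIN (MAX (3, 7), 7) == MIN (7, 7) == 7.  */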
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
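/* For illustration (examples, not part of the original source): the
   pure-constant case rewrites 3 <= x as x > 2 (constant reduced, sides
   swapped), which needs no overflow assumption.  The A +- CST case
   rewrites x - 5 < y as x - 4 <= y, which is only valid when signed
   overflow is undefined, hence *strict_overflow_p is set there.  */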
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
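
  /* For illustration (an example, not part of the original source): for
     a signed int X with undefined overflow, X + 2 < 5 is rewritten here
     as X < 3.  If the constant arithmetic overflows, e.g. X + 1 > INT_MAX,
     the whole comparison folds to a constant (false in that case).  */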
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert_loc (loc, signed_size_type_node,
					    offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert_loc (loc, signed_size_type_node,
					    offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;
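
  /* For illustration (examples, not part of the original source): with
     signed X and undefined overflow, X * 4 < 0 folds to X < 0 above,
     and X * -4 < 0 folds to X > 0 because the negative multiplier
     reverses the comparison.  */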
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  fold_convert_loc (loc, itype, integer_zero_node));
}
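/* For illustration (an example, not part of the original source): for
   z == a + b*i, z * conj(z) == (a + b*i) * (a - b*i) == a*a + b*b,
   a purely real value, so the result is built as the COMPLEX_EXPR
   (a*a + b*b, 0) without any full complex multiplication.  */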
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr)
          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
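
/* Illustration (an assumed example, not from the original sources): if EXPR
   is the address of a variable with 16-byte alignment, this returns M == 16
   with *RESIDUE == 0; for that address plus the constant 20 it still returns
   16 but sets *RESIDUE == 20, so the low four bits of the pointer are known
   to be 20 & 15 == 4.  */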
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);
  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1, 0);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
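
  /* For example (illustration only): when both operands are
     boolean-valued, (a < b) & (c < d) becomes a TRUTH_AND_EXPR of the
     two comparisons, and (a < b) == (c < d) becomes the inversion of
     the corresponding TRUTH_XOR_EXPR.  */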
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)),
                                 op1);
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
          goto fold_binary_exit;
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
          goto fold_binary_exit;
        }

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc,
                                                                    sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc,
                                                                    sizetype,
                                                                    arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype,
                                         TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype, arg01,
                                   fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                    TREE_TYPE (arg00),
                                                    arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
         of the array.  The loop optimizer sometimes produces this type of
         expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0),
                                                          arg0, cst0));
            }
        }
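
      /* The integer folds above rely on two's complement identities
         (illustration only): -A == ~A + 1, so ~A + 1 folds to -A, and
         ~X + X sets every bit, i.e. is -1.  Likewise X + (X / 3) * -3
         equals X % 3 by the truncating-division rule
         X == (X / C) * C + X % C.  */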
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce
             more simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc,
                                                                           type,
                                                                           parg0),
                                                         fold_convert_loc (loc,
                                                                           type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type,
                                                                      marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }
          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                      : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                      : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                      : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                      : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR
                  || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR
                  || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type,
                                                  arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2 (LROTATE_EXPR,
                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                              TREE_OPERAND (arg0, 0),
                              code0 == LSHIFT_EXPR
                              ? tree01 : tree11);
                SET_EXPR_LOCATION (tem, loc);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
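
      /* Example of the rotate recognition above (illustration only):
         for a 32-bit unsigned x, (x << 3) + (x >> 29) satisfies the
         first pattern (3 + 29 == TYPE_PRECISION) and is rewritten as an
         LROTATE_EXPR of x by 3, and (x << n) + (x >> (32 - n)) matches
         the variable-count form.  */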
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal
                 is greater than the positive part.  Otherwise, the
                 multiplicative folding code (i.e. extract_muldiv) may be
                 fooled in case unsigned constants are subtracted, like in
                 the following example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type,
                                  associate_trees (loc, var0, con0,
                                                   code, type));
            }
        }

      return NULL_TREE;
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                             TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));
      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR,
                                                       rtype, arg1));
                  tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR,
                                                       rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */
      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert_loc (loc, type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert_loc (loc, type,
                                           TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert_loc (loc, type,
                                           TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
                                      fold_convert_loc (loc, type, esz));
            }
        }
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2),
                                                     arg1));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }
          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));
          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc,
                                                                 IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR,
                                                    rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR,
                                                    rtype, arg0),
                                   negate_expr (fold_build1_loc (loc,
                                                                 REALPART_EXPR,
                                                                 rtype, arg0)));
            }
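
          /* Worked example (not from the original sources): for
             z = a + b*i, z * I == (a + b*i) * (0 + 1*i) == -b + a*i,
             i.e. __complex__ (-__imag z, __real z), which is the first
             branch above; multiplying by -I negates the other component
             instead.  */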
          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }
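
              /* Note (commentary, not from the original sources):
                 identities such as root(x)*root(y) == root(x*y),
                 expN(x)*expN(y) == expN(x+y) and tan(x)*cos(x) == sin(x)
                 are exact over the reals but not under IEEE rounding,
                 overflow, and NaN rules, which is why this block is
                 guarded by flag_unsafe_math_optimizations.  */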
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1
                  >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     build_int_cst_wide (type,
                                                                         lo3,
                                                                         hi3)),
                                    arg1);
        }
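
      /* Example of the canonicalization above (illustration only):
         (X & 0xF0) | 0x3C keeps in C1 only bits not already forced by
         C2, giving (X & 0xC0) | 0x3C, since 0xF0 & ~0x3C == 0xC0.  */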
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR,
                                                 type, arg1));
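
      /* Both conversions follow from ~X == X ^ ~0 together with the
         associativity of XOR (illustration only):
         ~X ^ ~Y == X ^ ~0 ^ Y ^ ~0 == X ^ Y, and
         ~X ^ C == X ^ (~0 ^ C) == X ^ ~C.  */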
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR,
                                                   TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem),
                                                                  1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR,
                                                   TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem),
                                                                  1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
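      /* E.g. for unsigned char c, (int) c & 0377 is just (int) c:
	 zero extension leaves only the low 8 bits possibly set, and
	 the mask 0377 covers all of them.  */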
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
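      /* E.g. if arg0 is known to be 16-byte aligned, so that
	 modulus == 16 and residue == 0, then arg0 & 15 folds to
	 the constant 0.  */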
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}
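      /* E.g. for a 32-bit unsigned X, (X >> 24) & 0xff widens the
	 mask to all ones, because the shift already clears the upper
	 24 bits; the now-redundant BIT_AND_EXPR is then removed by
	 the integer_all_onesp case above.  */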
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));
      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2_loc (loc, MULT_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  tem);
		}
	    }
	}
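      /* E.g. x / 4.0 becomes x * 0.25 here when optimizing, because
	 0.25 is the exact inverse of 4.0; no -freciprocal-math is
	 needed for that branch.  */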
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc, expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (NULL_TREE, pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
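      /* E.g. for unsigned A, A / (2 << N) becomes A >> (N + 1),
	 since exact_log2 (2) == 1.  */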
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1),
					   arg1,
					   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
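      /* E.g. for nonnegative X, X % 8 becomes X & 7, and
	 X % (4 << N) becomes X & ((4 << N) - 1).  */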
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */
    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
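      /* E.g. (x >> 3) >> 5 becomes x >> 8, while for a 32-bit unsigned x,
	 (x << 30) << 30 folds to 0 because the combined count reaches
	 the precision.  */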
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
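      /* E.g. rotating a 32-bit value right by 8 and then by 24
	 leaves it unchanged.  */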
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				    fold_build2_loc (loc, code, type,
						     a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     a00, a10),
				    a01);
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (loc, code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}
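      /* E.g. 7 - X == X would require 2 * X == 7, which is impossible
	 even in modular arithmetic because 2 * X is always even, so
	 the fold above yields false for == and true for !=.  */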
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
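      /* E.g. for signed 32-bit X, ((X >> 31) & 1) != 0 becomes
	 (X & 0x80000000) != 0, while ((X >> 28) & 0x10) != 0, whose
	 mask would overflow when shifted back, becomes X < 0.  */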
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert_loc (loc, type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
					   arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
					   TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_int_cst (itype, 0));
	    }
	}
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
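      /* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */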
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00, arg10),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00, arg11),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01, arg10),
						     arg00),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01, arg11),
						     arg00),
				    build_int_cst (itype, 0));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_XOR_EXPR, itype,
						     arg00,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01, arg11)),
				    arg10);
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}
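      /* Illustrative note (not from the original source): for
	 "__complex__ double a, b", if the comparison of the real parts
	 folds to false, "a == b" collapses to false while the imaginary
	 operands are kept only for their side effects; if it folds to
	 true, the whole test reduces to a single scalar comparison of
	 the imaginary parts.  */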
      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
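      /* Illustrative note (not from the original source): for signed int
	 x under undefined-overflow semantics, "x + 1 > x" folds to 1 and
	 "x - 1 > x" folds to 0; fold_overflow_warning lets
	 -Wstrict-overflow report that the simplification assumed no
	 signed wrap-around.  For unsigned or wrapping types these folds
	 are not performed.  */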
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
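      /* Illustrative note (hypothetical 32-bit int, not from the original
	 source): for unsigned int x, "x > 2147483647" (the signed
	 maximum) is rewritten as "(int) x < 0", turning a comparison
	 against a large constant into a sign-bit test, while
	 "x <= UINT_MAX" folds outright to 1 and "x > UINT_MAX" to 0.  */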
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	{
	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				TREE_OPERAND (arg1, 1)),
			build_int_cst (TREE_TYPE (arg0), 0));
	  goto fold_binary_exit;
	}

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			fold_convert_loc (loc, TREE_TYPE (arg0),
					  build2 (RSHIFT_EXPR,
						  TREE_TYPE (arg0), arg0,
						  TREE_OPERAND (TREE_OPERAND (arg1, 0),
								1))),
			build_int_cst (TREE_TYPE (arg0), 0));
	  goto fold_binary_exit;
	}

      return NULL_TREE;
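      /* Illustrative note (not from the original source): for unsigned
	 int x and int n, "x < (1u << n)" becomes "(x >> n) == 0" and
	 "x >= (1u << n)" becomes "(x >> n) != 0", replacing a shift of
	 a constant plus a full compare by a shift of the variable plus
	 a test against zero.  */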
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
 fold_binary_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
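/* Usage sketch (not from the original source): for a conditional such
   as "1 ? x : ({ lab: y; })" the dead arm cannot simply be dropped,
   because the label "lab" may be the target of a goto from outside the
   discarded operand; fold_ternary_loc below therefore calls
   contains_label_p on the unused operand before throwing it away.  */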
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away the operand which contains a label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
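      /* Illustrative note (not from the original source): for
	 truth-valued a and b, "a ? b : 0" becomes "a && b",
	 "a ? b : 1" becomes "!a || b", "a ? 0 : b" becomes "!a && b",
	 and "a ? 1 : b" becomes "a || b", exposing the short-circuit
	 forms to later folding and jump optimization.  */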
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0),
					      idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert_loc (loc, type, integer_zero_node);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
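/* Illustrative note (not from the original source): the ARRAY_REF case
   above means that an indexed read from a constant CONSTRUCTOR, e.g.
   element 1 of an aggregate initialized as {10, 20, 30}, can fold
   directly to 20 by binary-searching the constructor's sorted
   (index, value) pairs, including RANGE_EXPR index entries.  */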
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    {
      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    {
      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    {
      tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
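/* Illustrative note (not from the original source): the *_initializer
   variants exist because an expression such as "1.0 / 0.0" in a static
   initializer must fold to infinity at compile time even when
   -ftrapping-math would otherwise require keeping the division for its
   run-time floating-point exception.  */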
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
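/* Illustrative note (hypothetical expressions, not from the original
   source): multiple_of_p reports that "i * 4" and "j * 8 + k * 12" are
   multiples of 4, and that "n & -4" (for signed int n) is a multiple
   of 4 via the BIT_AND_EXPR rule, but it makes no claim about
   "i * 4 + 2".  */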
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
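/* Illustrative note (hypothetical types, not from the original source):
   with unsigned char a, b promoted to 32-bit int, "a + b" is known
   non-negative because both operands are zero-extended and at least
   two bits narrower than the result, and "a * b" is known non-negative
   because 8 + 8 bits of product fit well within int's precision.  */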
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
15072 /* Return true if T is known to be non-negative. If the return
15073 value is based on the assumption that signed overflow is undefined,
15074 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15075 *STRICT_OVERFLOW_P. */
bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return tree_expr_nonnegative_warnv_p (arg0,
                                                strict_overflow_p);
        break;

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
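/* Illustrative usage sketch (an editorial addition, not part of the
   original source): given a hypothetical CALL_EXPR CALL for a call to
   fabs, a caller could ask whether its value is known non-negative:

     bool strict = false;
     bool nonneg = tree_call_nonnegative_warnv_p (TREE_TYPE (call),
                                                  get_callee_fndecl (call),
                                                  CALL_EXPR_ARG (call, 0),
                                                  NULL_TREE, &strict);

   fabs falls in the unconditionally-true group above, so NONNEG is
   true and STRICT is left untouched.  */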
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */
bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
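/* Illustrative usage sketch (an editorial addition, not part of the
   original source): for a hypothetical signed tree X,

     tree expr = fold_build1 (ABS_EXPR, integer_type_node, x);
     bool nonneg = tree_expr_nonnegative_p (expr);

   ABS_EXPR of a signed operand is non-negative only on the assumption
   that signed overflow is undefined (ABS_EXPR of INT_MIN overflows),
   so a true result here may also emit the "assuming signed overflow
   does not occur" note through fold_overflow_warning.  */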
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of the operands must be positive and the other
             non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
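/* Illustrative usage sketch (an editorial addition, not part of the
   original source): for hypothetical signed operands A and B each
   known nonzero, the MULT_EXPR case above reports a nonzero product
   only by assuming the multiplication does not wrap:

     bool strict = false;
     bool nz = tree_binary_nonzero_warnv_p (MULT_EXPR, TREE_TYPE (a),
                                            a, b, &strict);

   so NZ being true implies STRICT was set.  The PLUS_EXPR case
   deliberately leaves the flag alone: on a twos-complement machine the
   sum of a positive and a non-negative value is nonzero even if the
   addition wraps.  */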
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
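/* Illustrative usage sketch (an editorial addition, not part of the
   original source): the ADDR_EXPR case is what lets "&local != 0" be
   folded.  For a hypothetical non-weak, non-static local VAR_DECL
   DECL:

     bool strict = false;
     tree addr = build_fold_addr_expr (decl);
     bool nz = tree_single_nonzero_warnv_p (addr, &strict);

   NZ is true even without -fdelete-null-pointer-checks, since a
   stack-allocated variable cannot sit at address zero; a DECL_WEAK
   base would instead make the predicate return false.  */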
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */
bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
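/* Illustrative usage sketch (an editorial addition, not part of the
   original source): this wrapper backs pointer-comparison folds, e.g.
   deciding "p == 0" once a hypothetical pointer tree P is known
   nonzero:

     if (tree_expr_nonzero_p (p))
       return constant_boolean_node (code == NE_EXPR, type);

   where CODE is the EQ_EXPR/NE_EXPR comparison being folded against a
   literal zero.  */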
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying
   TYPE, OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
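/* Illustrative usage sketch (an editorial addition, not part of the
   original source): callers use this when only a constant result is
   acceptable:

     tree four = build_int_cst (sizetype, 4);
     tree eight = build_int_cst (sizetype, 8);
     tree sum = fold_binary_to_constant (PLUS_EXPR, sizetype, four, eight);

   SUM is the INTEGER_CST 12.  Folding a non-constant operand the same
   way, say "x + 0" for a hypothetical variable X, yields NULL_TREE
   because the simplified result X is not TREE_CONSTANT.  */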
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */
tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL_TREE;
}
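/* Illustrative usage sketch (an editorial addition, not part of the
   original source): for a GENERIC tree EXP that reads "abc"[1], where
   the base is a STRING_CST of a single-byte, integer-mode character
   type,

     tree c = fold_read_from_constant_string (exp);

   returns the INTEGER_CST 'b'.  Reads at or past TREE_STRING_LENGTH,
   or through multi-byte (e.g. wide-character) element types, return
   NULL_TREE instead.  */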
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */
static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
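/* Illustrative usage sketch (an editorial addition, not part of the
   original source): negating the most negative value of a signed type
   exercises the overflow propagation above:

     tree min = TYPE_MIN_VALUE (integer_type_node);
     tree neg = fold_negate_const (min, integer_type_node);

   neg_double reports overflow (a 32-bit int has no +2147483648), so
   force_fit_type_double returns a constant with TREE_OVERFLOW set.  */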
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */
tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */
tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
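/* Illustrative usage sketch (an editorial addition, not part of the
   original source): with the two-word constant representation used
   here, the complement flips both halves:

     tree zero = build_int_cst (integer_type_node, 0);
     tree ones = fold_not_const (zero, integer_type_node);

   ONES is the INTEGER_CST -1, i.e. all bits set in TREE_INT_CST_LOW
   and TREE_INT_CST_HIGH after force_fit_type_double truncates to the
   type's precision.  */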
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */
static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
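/* Illustrative usage sketch (an editorial addition, not part of the
   original source): the canonicalization above funnels every integer
   comparison into EQ or LT.  For instance GE is "LT, then invert":

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree r = fold_relational_const (GE_EXPR, boolean_type_node,
                                     two, three);

   INT_CST_LT (2, 3) yields 1, the invert step flips it, and R is
   boolean_false_node.  With -ftrapping-math, a NaN operand makes the
   REAL_CST path return NULL_TREE for the ordered comparisons.  */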
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the MODIFY_EXPR it contains, has side
     effects.  If either has none, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left-hand side of the
     MODIFY_EXPR because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset / part_widthi
              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type,
                                    TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}
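/* Illustrative usage sketch (an editorial addition, not part of the
   original source): for a hypothetical VAR_DECL DECL of type
   "int[4]",

     tree addr = build_fold_addr_expr_loc (loc, decl);
     tree deref = build_fold_indirect_ref_loc (loc, addr);

   fold_indirect_ref_1 sees an ADDR_EXPR whose operand type matches the
   requested type, so the "*&p => p" rule fires and DEREF is DECL
   itself; no INDIRECT_REF node is built.  */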
/* Given an INDIRECT_REF T, return either T or a simplified version.  */
tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */
tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */
tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
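/* Illustrative usage sketch (an editorial addition, not part of the
   original source): for a power-of-two divisor the non-constant path
   above emits the classic mask sequence, in effect

     (VALUE + 7) & -8

   for divisor 8, while a constant input such as
   round_up_loc (loc, size_int (13), 8) takes the INTEGER_CST branch
   and yields size_int (16) directly.  */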
/* Likewise, but round down.  */
tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
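/* Illustrative usage sketch (an editorial addition, not part of the
   original source): rounding down needs no addition, only the mask,
   so divisor 8 produces in effect "VALUE & -8";
   round_down_loc (loc, size_int (13), 8) folds through size_binop_loc
   to size_int (8).  */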
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */
static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
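/* Illustrative usage sketch (an editorial addition, not part of the
   original source): for two addresses into the same object, say
   E1 = &a[3] and E2 = &a[1] over a hypothetical "int a[4]", both
   split to the core &a with byte offsets 12 and 4, so

     HOST_WIDE_INT diff;
     bool ok = ptr_difference_const (e1, e2, &diff);

   sets OK to true and DIFF to 8.  Distinct cores, or a symbolic
   offset on only one side, make the function return false.  */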
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);