/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
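/* Worked example (added for illustration): the encoding uses one bit
   each for "less" (1), "equal" (2), "greater" (4) and "unordered" (8),
   so COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.  Folding the AND of two
   comparisons then reduces to a bitwise AND of their codes:
   (a <= b) && (a >= b) gives COMPCODE_LE & COMPCODE_GE
   == 3 & 6 == 2 == COMPCODE_EQ, i.e. a == b.  */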
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
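/* Worked example (illustration only): with 8-bit values, 100 + 100
   wraps to -56.  a ^ b == 0, so ~(a ^ b) has the sign bit set, and
   a ^ sum == 100 ^ -56 also has the sign bit set, so the macro yields
   nonzero.  For 1 + -1 == 0 the operands differ in sign, ~(a ^ b) has
   a clear sign bit, and no overflow is reported.  */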
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
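/* For illustration: on a host where HOST_BITS_PER_WIDE_INT is 32,
   BASE is 0x10000, and for x == 0x12345678 we get LOWPART (x) == 0x5678
   and HIGHPART (x) == 0x1234; LOWPART + HIGHPART * BASE recovers x.  */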
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
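/* For illustration, again with a 32-bit HOST_WIDE_INT: encoding
   low == 0x12345678, hi == 0x0abc gives words == { 0x5678, 0x1234,
   0x0abc, 0x0000 }, and decode reconstructs exactly the original
   low/hi pair, so decode undoes encode.  */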
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
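/* For illustration: fitting the value 255 (l1 == 0xff, h1 == 0) to a
   signed 8-bit type first keeps the low 8 bits, then sign extends
   because bit 7 is set, producing -1.  Since -1 differs from 255 the
   function returns nonzero: 255 does not fit in a signed char.  */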
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
                       + (unsigned HOST_WIDE_INT) h2
                       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
            || (h == h1 && l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
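/* For illustration: any carry out of the low word shows up as the
   (l < l1) term, e.g. l1 == ~(unsigned HOST_WIDE_INT) 0 and l2 == 1
   give l == 0 < l1, so 1 is carried into the high word.  Unsigned
   overflow is then just a wrapped high word; for the signed case
   OVERFLOW_SUM_SIGN on the high words decides.  */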
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
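/* This is schoolbook multiplication in base 2**(HOST_BITS_PER_WIDE_INT/2):
   the 4-digit by 4-digit product fills 8 digits, the low 4 are the
   result and the high 4 detect overflow.  For signed operands the top
   half is first corrected for the negative operand(s), after which it
   must equal the sign extension of the low half's sign bit; any other
   bit pattern means signed overflow.  */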
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
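/* For illustration: an arithmetic right shift of -16 by 4 replicates
   the sign bit and yields -1, while a logical shift of the same bit
   pattern fills with zeros and yields a large positive value; the
   SIGNMASK computed above is what makes the difference.  */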
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
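/* For illustration, dividing 7 by 2 under the different codes gives a
   quotient of 3 (TRUNC and FLOOR) or 4 (CEIL and ROUND), while -7 / 2
   gives -3 (TRUNC and CEIL) or -4 (FLOOR and ROUND); the matching
   remainder is recomputed afterwards so that num == quo * den + rem
   always holds.  */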
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
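/* For illustration: sin is odd, so fold_negate_expr can rewrite
   -sin (x) as sin (-x) and save the negation; rint and friends are
   only odd when the rounding mode is sign-symmetric, hence the
   !flag_rounding_math guard above.  */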
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
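/* For illustration: for a 32-bit signed type the only rejected value
   is -2147483648, whose low bits equal 1 << 31; negating it would
   overflow because +2147483648 is not representable in that type.  */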
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
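/* For illustration: splitting IN == a + 4 with CODE == PLUS_EXPR
   returns the variable part a and sets *LITP to 4, while IN == b - 8
   returns b and sets *MINUS_LITP to 8, since the literal was
   subtracted rather than added.  */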
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
              goto associate_trees_exit;
            }
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                            fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
              goto associate_trees_exit;
            }
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2, notrunc),
                               const_binop (MULT_EXPR, i2, i2, notrunc),
                               notrunc);
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2, notrunc),
                               const_binop (MULT_EXPR, i1, i2, notrunc),
                               notrunc);
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2, notrunc),
                               const_binop (MULT_EXPR, r1, i2, notrunc),
                               notrunc);

              real = const_binop (code, t1, magsquared, notrunc);
              imag = const_binop (code, t2, magsquared, notrunc);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2, notrunc);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio,
                                                       notrunc),
                                          notrunc);
                  real = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, i1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2, notrunc);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio,
                                                       notrunc),
                                          notrunc);

                  real = const_binop (MULT_EXPR, i1, ratio, notrunc);
                  real = const_binop (PLUS_EXPR, real, r1, notrunc);
                  real = const_binop (code, real, div, notrunc);

                  imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
                  imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
                  imag = const_binop (code, imag, div, notrunc);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2, notrunc);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc, MINUS_EXPR,
							     arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				???  sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1))
				       == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
				  const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     If any fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type),
						    arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
	goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  SET_EXPR_LOCATION (x, loc);
  return x;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);
  protected_set_expr_location (x, loc);
  return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0))
	 != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0),
				    TREE_IMAGPART (arg1), flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}

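/* An illustrative note (not from the original sources): typical results
   of operand_equal_p:

     a + b   vs.  b + a   -> 1  (commutative ops allow swapped operands)
     f ()    vs.  f ()    -> 0  unless the callee is ECF_CONST (or
				ECF_PURE under OEP_PURE_SAME)
     -0.0    vs.  0.0     -> 1 only if the mode does not honor signed
			     zeros; REAL_VALUES_IDENTICAL distinguishes
			     them otherwise.  */
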
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto omit_one_operand_exit;
    }

  return non_lvalue_loc (loc, t);

 omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto pedantic_omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto pedantic_omit_one_operand_exit;
    }

  return pedantic_non_lvalue_loc (loc, t);

 pedantic_omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    {
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
      SET_EXPR_LOCATION (t, loc);
    }
  if (TREE_SIDE_EFFECTS (omitted1))
    {
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	t = build2 (TRUTH_XOR_EXPR, type,
		    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
	loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
	if (loc1 == UNKNOWN_LOCATION)
	  loc1 = loc;
	if (loc2 == UNKNOWN_LOCATION)
	  loc2 = loc;

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    VOID_TYPE_P (TREE_TYPE (arg1))
		    ? arg1 : invert_truthvalue_loc (loc1, arg1),
		    VOID_TYPE_P (TREE_TYPE (arg2))
		    ? arg2 : invert_truthvalue_loc (loc2, arg2));
	break;
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
		  TREE_OPERAND (arg, 0),
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	{
	  t = build1 (TRUTH_NOT_EXPR, type, arg);
	  break;
	}

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      t = NULL_TREE;
      break;
    }

  if (t)
    SET_EXPR_LOCATION (t, loc);

  return t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    {
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
      SET_EXPR_LOCATION (tem, loc);
    }

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
		   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

4155 /* Optimize a bit-field compare.
4157 There are two cases: First is a compare against a constant and the
4158 second is a comparison of two items where the fields are at the same
4159 bit position relative to the start of a chunk (byte, halfword, word)
4160 large enough to contain it. In these cases we can avoid the shift
4161 implicit in bitfield extractions.
4163 For constants, we emit a compare of the shifted constant with the
4164 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4165 compared. For two fields at the same position, we do the ANDs with the
4166 similar mask and compare the result of the ANDs.
4168 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4169 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4170 are the left and right operands of the comparison, respectively.
4172 If the optimization described above can be done, we return the resulting
4173 tree. Otherwise we return zero. */
4176 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
4177 tree compare_type
, tree lhs
, tree rhs
)
4179 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
4180 tree type
= TREE_TYPE (lhs
);
4181 tree signed_type
, unsigned_type
;
4182 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
4183 enum machine_mode lmode
, rmode
, nmode
;
4184 int lunsignedp
, runsignedp
;
4185 int lvolatilep
= 0, rvolatilep
= 0;
4186 tree linner
, rinner
= NULL_TREE
;
4190 /* Get all the information about the extractions being done. If the bit size
4191 if the same as the size of the underlying object, we aren't doing an
4192 extraction at all and so can do nothing. We also don't want to
4193 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4194 then will no longer be able to replace it. */
4195 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
4196 &lunsignedp
, &lvolatilep
, false);
4197 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
4198 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
4203 /* If this is not a constant, we can only do something if bit positions,
4204 sizes, and signedness are the same. */
4205 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
4206 &runsignedp
, &rvolatilep
, false);
      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  lhs = build2 (code, compare_type,
                build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
  SET_EXPR_LOCATION (lhs, loc);
  return lhs;
}
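
/* A worked illustration of the transformation above (not from the original
   source; the struct and field names are hypothetical).  Given

        struct S { unsigned int pad : 5; unsigned int f : 3; } s;
        ... s.f == 5 ...

   the code extracts the word containing F, builds MASK as LBITSIZE (here 3)
   ones positioned over F, shifts the constant 5 to the same position, and
   emits roughly

        (WORD (s) & MASK) == (5 << LBITPOS)

   so the bit-field compare becomes one full-word AND plus a compare.  The
   exact bit positions depend on BYTES_BIG_ENDIAN and the target mode.  */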

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
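
/* Worked example of the mask computation (illustrative only): for a 3-bit
   field, *PBITSIZE == 3 and type_for_size yields an 8-bit unsigned type,
   so PRECISION == 8.  Starting from the all-ones constant 0xff,

        0xff << (8 - 3) == 0xe0,    0xe0 >> (8 - 3) == 0x07

   leaves exactly three low-order ones, which is the field mask.  Any mask
   taken from an enclosing BIT_AND_EXPR is then ANDed into it.  */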

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
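
/* Numeric example (illustrative): for an 8-bit type on a 64-bit host,
   WIDTH == 8 <= HOST_BITS_PER_WIDE_INT, so HI == 0, LO == 1 << 7 == 0x80,
   MASK_HI == 0 and MASK_LO == 0xff.  VAL matches exactly when its low
   eight bits are 0x80, i.e. when it is the sign bit -128 of a signed
   8-bit type.  */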

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
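
/* Example of the infinity handling above (illustrative): comparing an
   omitted upper bound (SGN0 == 1) against a present constant (SGN1 == 0)
   makes LT_EXPR yield 0 and GT_EXPR yield 1 -- an absent upper bound
   compares greater than every representable value, as if it were the
   value Z described above.  */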

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  build_int_cst (arg0_type, 0),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = build_int_cst (arg0_type, 0);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               build_int_cst (exp_type, 0),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                build_int_cst (exp_type, 0),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        build_int_cst (exp_type, 1));
          SET_EXPR_LOCATION (exp, loc);
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
             move a constant to the other side.  */
          if (!TYPE_UNSIGNED (arg0_type)
              && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
            *strict_overflow_p = true;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        CASE_CONVERT: case NON_LVALUE_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert_loc (loc, arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert_loc (loc, arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type;
              /* For fixed-point modes, we need to pass the saturating flag
                 as the 2nd parameter.  */
              if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type),
                              TYPE_SATURATING (arg0_type));
              else
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                                 fold_convert_loc (loc,
                                                                   arg0_type,
                                                                   high_positive),
                                                 build_int_cst (arg0_type, 1));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
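
/* Illustrative trace: for EXP == "X > 10" we start with IN_P == 0 and the
   range [0, 0] (i.e. "X != 0").  The GT_EXPR case sets LOW to 0 (no lower
   bound) and HIGH to 10, leaving IN_P untouched, so the result is X with
   "- [-, 10]": X lies outside [min, 10], which is exactly X > 10.  */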

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                                  build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_convert_loc (loc, sizetype, low);
          low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
          return build_range_check (loc, type,
                                    fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                     etype, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype,
                                               exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
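
/* Worked case of the wrap-around trick above (illustrative): to test
   "C >= 2 && C <= 5", VALUE becomes 5 - 2 == 3, the recursive call sees a
   zero low bound, converts to the unsigned equivalent type, and the whole
   range check collapses to the single comparison

        (unsigned) (C - 2) <= 3

   which is correct precisely because unsigned subtraction wraps: any C
   below 2 wraps around to a huge value that fails the comparison.  */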

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
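
/* A short worked case (illustrative): merging "+ [2, 5]" with "+ [4, 9]"
   takes the in0_p && in1_p branch; the ranges overlap and neither is a
   subset of the other, so the result is "+ [4, 5]", the intersection.
   Merging "+ [2, 5]" with "+ [7, 9]" instead hits NO_OVERLAP and yields
   the always-false range.  */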

/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
                have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                               arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                               tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                               arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                                 arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                                 arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type,
                                                                tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type,
                                                                tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type,
                                                              arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type,
                                                              arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                                   tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1), 0),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                                   tem));
          }
        break;
      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        {
          tem = build2 (code == TRUTH_ANDIF_EXPR
                        ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                        type, op0, op1);
          SET_EXPR_LOCATION (tem, loc);
          return tem;
        }

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              tem = build2 (code == TRUTH_ANDIF_EXPR
                            ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                            type, lhs, rhs);
              SET_EXPR_LOCATION (tem, loc);
              return tem;
            }
        }
    }

  return 0;
}
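
/* Illustrative end-to-end case: for "ch >= '0' && ch <= '9'", make_range
   yields + ['0', -] and + [-, '9'] on the two operands, merge_ranges
   intersects them to + [48, 57], and build_range_check then emits

        (unsigned) (ch - 48) <= 9

   replacing two comparisons and a branch with one subtract and compare.  */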

/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask),
                        0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type,
                       const_binop (BIT_XOR_EXPR, c, temp, 0));
}
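
/* Worked example (illustrative), with P == 4 and an 8-bit mode: TEMP is
   ((C >> 3) & 1) placed in the sign bit and arithmetically shifted back,
   giving 0xf0 whenever the 4-bit sign bit is set.  For C == 0x0a (the
   4-bit value -6 zero-extended), 0x0a ^ 0xf0 == 0xfa, so the extra bits
   are nonzero; for the properly sign-extended C == 0xfa, 0xfa ^ 0xf0 ==
   0x0a and the extra bits are zero -- exactly the property promised in
   the comment above.  */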

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (location_t loc, enum tree_code code, tree truth_type,
              tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        {
          result = build2 (NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
          goto fold_truthop_exit;
        }

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        {
          result = build2 (EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
          goto fold_truthop_exit;
        }

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
        {
          if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
            {
              result = build2 (code, truth_type, lhs, rhs);
              goto fold_truthop_exit;
            }
          return NULL_TREE;
        }
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          result = build2 (wanted_code, truth_type, lhs, rhs);
          goto fold_truthop_exit;
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          result = build2 (wanted_code, truth_type, lhs, rhs);
          goto fold_truthop_exit;
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    {
      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
      SET_EXPR_LOCATION (result, loc);
    }

  result = build2 (wanted_code, truth_type, result,
                   const_binop (BIT_IOR_EXPR, l_const, r_const, 0));

 fold_truthop_exit:
  SET_EXPR_LOCATION (result, loc);
  return result;
}
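
/* Illustrative example (hypothetical struct): given

        struct S { unsigned a : 4; unsigned b : 4; } *p;
        ... p->a == 2 && p->b == 4 ...

   both fields live in one byte, so the constant path above loads that byte
   once, IORs the two field masks together, shifts the constants into their
   bit positions, and emits a single test of roughly the shape

        (BYTE (p) & MERGED_MASK) == (2 << POS_A | 4 << POS_B)

   (the AND is dropped when the merged mask covers the whole byte).  The
   exact positions depend on endianness; the point is that one load and one
   compare replace two bit-field extractions and a branch.  */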
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant (T is OP0 CODE OP1).  Return the simplified comparison, or
   NULL_TREE if nothing can be done.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
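/* Illustrative sketch, not part of GCC and kept out of the build: the
   identities used by optimize_minmax_comparison, checked on plain ints
   with hypothetical names.  */
#if 0
static int imax (int a, int b) { return a > b ? a : b; }

static int
minmax_comparison_demo (int x, int c)
{
  /* MAX (x, c) == c  <->  x <= c,  and  MAX (x, c) > c  <->  x > c.  */
  return (imax (x, c) == c) == (x <= c)
	 && (imax (x, c) > c) == (x > c);	/* always 1 */
}
#endif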
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted-to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.
	     ??? Until we can properly mark individual operations as
	     not overflowing we need to treat sizetype special here as
	     stor-layout relies on this optimization to make
	     DECL_FIELD_BIT_OFFSET always a constant.  */
	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = int_const_binop (MULT_EXPR,
					 fold_convert (ctype, op1),
					 fold_convert (ctype, c), 1))
	  && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
					       TREE_INT_CST_HIGH (t1),
					       (TYPE_UNSIGNED (ctype)
						&& tcode != MULT_EXPR) ? -1 : 1,
					       TREE_OVERFLOW (t1)))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
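/* Illustrative sketch, not part of GCC and kept out of the build: the
   transformation described above, (X * 8 + Y * 16) / 4 -> X * 2 + Y * 4,
   holds whenever the intermediate products do not overflow; the guard
   makes that caveat explicit.  Names are hypothetical.  */
#if 0
static int
extract_muldiv_demo (unsigned long x, unsigned long y)
{
  if (x > 1000000UL || y > 1000000UL)
    return 1;				/* avoid wraparound in the demo */
  return (x * 8 + y * 16) / 4 == x * 2 + y * 4;	/* always 1 here */
}
#endif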
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     ARG in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
  return fold_convert_loc (loc, type, test);
}
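/* Illustrative sketch, not part of GCC and kept out of the build: the
   rewrite performed above, a + (b ? x : y) -> b ? (a + x) : (a + y),
   on plain ints with hypothetical names.  */
#if 0
static int
cond_arg_demo (int a, int b, int x, int y)
{
  return (a + (b ? x : y)) == (b ? (a + x) : (a + y));	/* always 1 */
}
#endif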
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
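/* Illustrative sketch, not part of GCC and kept out of the build: why
   X + 0.0 is not an identity when signed zeros are honored.  Under the
   default round-to-nearest mode, -0.0 + 0.0 yields +0.0 (the sign is
   lost), while -0.0 - 0.0 stays -0.0.  */
#if 0
#include <math.h>

static int
signed_zero_demo (void)
{
  double nz = -0.0;
  return signbit (nz + 0.0) == 0	/* sign lost by + 0.0  */
	 && signbit (nz - 0.0) != 0;	/* sign preserved by - 0.0  */
}
#endif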
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, code, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }
	}
    }

  return NULL_TREE;
}
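/* Illustrative sketch, not part of GCC and kept out of the build: the
   core identity used above.  For x >= 0 and c >= 0 (no NaNs), sqrt(x) > c
   is equivalent to x > c*c; an exactly representable c sidesteps the
   rounding of c*c that fold_mathfn_compare handles via real_convert.
   Names are hypothetical.  */
#if 0
#include <math.h>

static int
sqrt_compare_demo (double x)
{
  double c = 3.0;			/* c and c*c are exact doubles */
  if (!(x >= 0.0))
    return 1;				/* NaN/negative: out of scope */
  return (sqrt (x) > c) == (x > c * c);
}
#endif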
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
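/* Illustrative sketch, not part of GCC and kept out of the build: the
   fold above in ordinary C.  For any non-NaN double, x < +Inf holds
   exactly when x <= DBL_MAX.  */
#if 0
#include <float.h>
#include <math.h>

static int
inf_compare_demo (double x)
{
  if (isnan (x))
    return 1;				/* NaN handling depends on HONOR_NANS */
  return (x < INFINITY) == (x <= DBL_MAX);
}
#endif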
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				-1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
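/* Illustrative sketch, not part of GCC and kept out of the build: the
   range check fold_div_compare builds, on plain ints.  With C's
   truncating division, x / 3 == 5 holds exactly for x in [15, 17].  */
#if 0
static int
div_compare_demo (int x)
{
  return ((x / 3) == 5) == (x >= 15 && x <= 17);	/* always 1 */
}
#endif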
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
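/* Illustrative sketch, not part of GCC and kept out of the build: the
   shift form produced above.  For a single-bit mask C with C2 = log2(C),
   (a & C) != 0 equals (a >> C2) & 1.  */
#if 0
static int
single_bit_test_demo (unsigned int a)
{
  return ((a & 8u) != 0) == ((a >> 3) & 1u);	/* always 1 */
}
#endif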
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}

/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1), 0,
				  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat op1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found a suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
					   fold_convert_loc (loc, itype,
							     TREE_OPERAND (pos, 1)),
					   fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
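/* Illustrative sketch, not part of GCC and kept out of the build: the
   address identity behind try_move_mult_to_index.  C pointer arithmetic
   already scales by the element size, so stepping a pointer to a[idx]
   by delta lands on a[idx + delta].  */
#if 0
static int
move_mult_to_index_demo (void)
{
  int a[10];
  int idx = 2, delta = 3;
  return &a[idx] + delta == &a[idx + delta];	/* always 1 */
}
#endif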
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
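/* Illustrative sketch, not part of GCC and kept out of the build: the
   distributive fold above.  In unsigned (wrapping) arithmetic the
   identity a*c + b*c == (a + b)*c holds for all values.  */
#if 0
static int
plusminus_mult_demo (unsigned int a, unsigned int b, unsigned int c)
{
  return a * c + b * c == (a + b) * c;	/* always 1 modulo 2^N */
}
#endif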
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	hi |= (unsigned HOST_WIDE_INT) value
	      << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
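/* As a concrete case, assuming 32-bit int and IEEE single precision:
   VIEW_CONVERT_EXPR<int>(1.0f) folds by encoding the float's four bytes
   and interpreting them back as an integer, yielding 0x3f800000.  */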
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        {
          t = build1 (NOP_EXPR, ptrtype, t);
          SET_EXPR_LOCATION (t, loc);
        }
    }
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    {
      t = build1 (ADDR_EXPR, ptrtype, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
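/* So for a pointer p, taking the address of *p folds straight back to
   p (with a NOP_EXPR cast when the requested pointer type differs),
   and the gimplifier never sees &*p.  */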
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.  So, in the latter case, only strip conversions
             that don't change the signedness.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                        fold_convert_loc (loc, TREE_TYPE (op0),
                                                          TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                                 arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            {
              tem = build1 (code, type,
                            build3 (COND_EXPR,
                                    TREE_TYPE (TREE_OPERAND
                                               (TREE_OPERAND (tem, 1), 0)),
                                    TREE_OPERAND (tem, 0),
                                    TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                    TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
              SET_EXPR_LOCATION (tem, loc);
            }
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3_loc (loc, COND_EXPR, type, arg0,
                                    fold_build1_loc (loc, code, type,
                                                     integer_one_node),
                                    fold_build1_loc (loc, code, type,
                                                     integer_zero_node));
        }
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
         barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
          || TREE_CODE (op0) == PAREN_EXPR)
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, TREE_CODE (op0), type,
                                TREE_OPERAND (op0, 0),
                                TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and initial types are either both
             float or both integer, we don't need the middle conversion if the
             former is wider than the latter and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer since
             then we sometimes need the middle conversion.  Likewise if the
             final type has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
        }
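      /* As an illustration of the first rule above: assuming 32-bit int
         and 64-bit long, (int) (long) i for an int I collapses to plain
         I, since I round-trips through the wider intermediate type
         unchanged.  */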
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
          if (! offset && bitpos == 0
              && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
              && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
               (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          SET_EXPR_LOCATION (tem, loc);
          return tem;
        }
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0), tem);
            }
        }
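      /* For example, if US has type unsigned short, (unsigned int)
         (us & 3) becomes (unsigned int) us & 3, exposing the
         zero-extension to further folding.  */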
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build2_loc (loc,
                                  TREE_CODE (arg0), type,
                                  fold_convert_loc (loc, type, arg00),
                                  fold_convert_loc (loc, sizetype, arg01));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
        return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                                  negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1_loc (loc, NEGATE_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
          int count = TYPE_VECTOR_SUBPARTS (type), i;

          for (i = 0; i < count; i++)
            {
              if (elements)
                {
                  elem = TREE_VALUE (elements);
                  elem = fold_unary_loc (loc, BIT_NOT_EXPR,
                                         TREE_TYPE (type), elem);
                  if (elem == NULL_TREE)
                    break;
                  elements = TREE_CHAIN (elements);
                }
              else
                elem = build_int_cst (TREE_TYPE (type), -1);
              list = tree_cons (NULL_TREE, elem, list);
            }
          if (i == count)
            return build_vector (type, nreverse (list));
        }

      return NULL_TREE;
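      /* The arithmetic identities above rely on two's complement, where
         ~Y == -Y - 1: hence ~(-x) == x - 1 and ~(x - 1) == -x.  */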
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
                                     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type,
             tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code,
                                 tree type, tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
                         TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
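/* For instance, x - 5 < y is rewritten as x - 4 <= y; the two agree
   for every x and y as long as the subtraction cannot overflow, which
   is why callers must honor *STRICT_OVERFLOW_P.  */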
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
                            bitpos / BITS_PER_UNIT, 0,
                            &total_low, &total_high,
                            true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
                             TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand_loc (loc, type,
                                         boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand_loc (loc, type,
                                         boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning (("assuming signed overflow does not occur "
                                  "when changing X +- C1 cmp C2 to "
                                  "X cmp C1 +- C2"),
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, lhs);
        }
    }
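  /* Concretely, x + 10 < 20 becomes x < 10 here; and when the constant
     arithmetic overflows, e.g. x - 1 > INT_MAX, the whole comparison
     collapses to constant false via the INT_MIN canonicalization.  */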
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          offset0 = TREE_OPERAND (arg0, 1);
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          offset1 = TREE_OPERAND (arg1, 1);
        }

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);

              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (signed_size_type_node, 0);
              else
                offset0 = fold_convert_loc (loc, signed_size_type_node,
                                            offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (signed_size_type_node, 0);
              else
                offset1 = fold_convert_loc (loc, signed_size_type_node,
                                            offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }
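  /* This lets comparisons such as &a.x == &a.y fold to a constant from
     the two field offsets alone, without ever materializing either
     address.  */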
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc,
                                                   TREE_CODE (arg1),
                                                   TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0),
                                                   TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }
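  /* E.g. x + 100 < y + 101 becomes x < y + 1: the combined constant 1
     is smaller in magnitude than either original constant, so no new
     overflow can be introduced.  */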
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;
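  /* The multiplication elimination above turns x * 4 < 0 into x < 0 and
     x * -4 < 0 into x > 0: with undefined signed overflow a nonzero
     multiplier cannot change which side of zero the product is on.  */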
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                REAL_VALUE_NEGATE (cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type,
                                               integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type,
                                               integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }
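  /* The 3-bit mask works like a truth table: for (a > b) == 0 the three
     trial foldings give high_result 0, equal_result 1 and low_result 1,
     i.e. mask 3, which selects LE_EXPR and hence folds to a <= b.  */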
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
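  /* E.g. x / 3 == 2 holds exactly when 6 <= x <= 8 under truncating
     division, so fold_div_compare can turn the division into a plain
     range test on x.  */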
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type,
                                                                 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype,
                                          rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype,
                                          ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          fold_convert_loc (loc, itype, integer_zero_node));
}
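/* E.g. z * conj(z) for z = 3 + 4i folds to 25 + 0i: the imaginary
   parts cancel, leaving rpart*rpart + ipart*ipart.  */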
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr)
          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
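/* As an example of the contract above: for EXPR == &s.f, where s has
   DECL_ALIGN_UNIT 8 and field f sits at byte offset 4, the function
   returns M == 8 and sets *RESIDUE to 4, i.e. the pointer value is
   congruent to 4 mod 8.  */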
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1, 0);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }
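  /* For example, 1 + 2 with two INTEGER_CST operands is folded outright
     by const_binop above, and a constant comparison such as 1 < 2
     collapses to a boolean constant via fold_relational_const.  */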
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
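  /* E.g. (a < b) & (c < d) becomes a TRUTH_AND_EXPR of the two
     comparisons converted to boolean, and (a < b) == (c < d) becomes
     the inversion of the corresponding TRUTH_XOR_EXPR.  */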
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
          goto fold_binary_exit;
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
          goto fold_binary_exit;
        }

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                    TREE_TYPE (arg00),
                                                    arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  The loop optimizer sometimes produces this kind
         of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc, sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }
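      /* Taken together, the rules above canonicalize pointer arithmetic:
         e.g. (p +p 4) +p 8 is rewritten as p +p (4 + 8), and the sizetype
         sum then folds to p +p 12.  */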
      return NULL_TREE;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
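      /* The last rule above folds, for example, x + (x/16)*-16 into
         x % 16; the guard only fires when the two constants sum to zero.  */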
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2 (LROTATE_EXPR,
                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                              TREE_OPERAND (arg0, 0),
                              code0 == LSHIFT_EXPR
                              ? tree01 : tree11);
                SET_EXPR_LOCATION (tem, loc);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
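      /* For a 32-bit unsigned x, this folds (x << 3) + (x >> 29) into a
         single LROTATE_EXPR of x by 3 bits; the MINUS_EXPR arms catch the
         variable form (x << b) + (x >> (32 - b)).  */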
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* With undefined overflow we can only associate constants
             with one variable.  */
          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
              tree tmp0 = var0;
              tree tmp1 = var1;

              if (TREE_CODE (tmp0) == NEGATE_EXPR)
                tmp0 = TREE_OPERAND (tmp0, 0);
              if (TREE_CODE (tmp1) == NEGATE_EXPR)
                tmp1 = TREE_OPERAND (tmp1, 0);
              /* The only case we can still associate with two variables
                 is if they are the same, modulo negation.  */
              if (!operand_equal_p (tmp0, tmp1, 0))
                ok = false;
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, type));
            }
        }

      return NULL_TREE;
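      /* For a wrapping type, e.g. unsigned x and y, (x + 1) + (y + 2)
         splits into variables x, y and literals 1, 2, which the code above
         reassociates into (x + y) + 3; with undefined overflow the
         two-variable case is rejected instead.  */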
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
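      /* E.g. with the power-of-2-minus-1 mask B == 7, the second rule above
         turns (a & ~7) - (a & 7) into (a ^ 7) - 7, and the first turns
         a - (a & b) into ~b & a.  */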
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                    : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                    : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert_loc (loc, type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }
      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
                                      fold_convert_loc (loc, type, esz));
            }
        }
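      /* Since MINUS_EXPR on pointers yields a byte difference at this
         level, &a[i] - &a[j] is folded to (i - j) * element_size without
         ever materializing the two addresses.  */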
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);
          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));
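          /* E.g. a * (1 << b) becomes a << b, and (a + a) * 3 becomes
             a * 2 * 3, letting the two constants combine into a * 6.  */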
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding, so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     build_int_cst_wide (type,
                                                                         lo3, hi3)),
                                    arg1);
        }
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = fold_convert_loc (loc, type, integer_zero_node);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }
11605 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11606 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11607 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11608 /* (X | Y) & X is (Y, X). */
11609 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11610 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11611 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11612 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11613 /* X & (X | Y) is (Y, X). */
11614 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11615 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11616 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11617 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11618 /* X & (Y | X) is (Y, X). */
11619 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11620 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11621 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11622 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR,
                                                   TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR,
                                                   TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
11636 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11637 && integer_onep (arg1
))
11639 tem
= TREE_OPERAND (arg0
, 0);
11640 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11641 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11642 build_int_cst (TREE_TYPE (tem
), 1)),
11643 build_int_cst (TREE_TYPE (tem
), 0));
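
      /* Worked example for the two folds above: the low bit of X ^ 1
         (or of ~X) is set exactly when the low bit of X is clear.
         E.g. for X = 6 (binary 110) both forms yield 1, and for
         X = 5 (binary 101) both yield 0.  */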
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);
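
          /* E.g. with a 32-bit int, (X << 16) & 0xff00 has ZEROBITS ==
             0xffff, which covers MASK == 0xff00 entirely, so the whole
             expression folds to (X, 0) above.  */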
          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                            tem, newmaskt);
                }
            }
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }
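
      /* The guards above matter: with A equal to 0.0, Inf or NaN, A / A
         evaluates to NaN rather than 1.0, so the fold is only safe when
         NaNs and infinities are ignored (e.g. under -ffinite-math-only).  */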
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2_loc (loc, MULT_EXPR, type,
                                          fold_convert_loc (loc, type, arg0),
                                          tem);
                }
            }
        }
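
      /* The exact-inverse path covers cases like X / 4.0, which can
         always become X * 0.25 because 0.25 is exactly representable;
         X / 10.0 -> X * 0.1 changes the rounding, so it is only done
         under -freciprocal-math above.  */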
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                                 TREE_OPERAND (arg1, 0)),
                                TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                                  CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1), tmp);
                }
            }
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                              build_real (type, dconst1),
                                              tmp);
                    }
                }
            }
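
          /* The HONOR_NANS/HONOR_INFINITIES checks above are what make
             these two folds safe: e.g. at x == 0.0, sin(x)/tan(x)
             evaluates to 0.0/0.0 == NaN, while cos(0.0) == 1.0.  */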
          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }
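
          /* E.g. pow (x, 3.0) / x becomes pow (x, 2.0), saving a
             division at the cost of adjusting the constant exponent.  */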
          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                          expfn, 1,
                                          fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt,
                                        build_int_cst (NULL_TREE, pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }
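
      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2), since
         log2 (4) == 2 and the divisor is a power of two shifted left.  */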
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    negate_expr (arg0)),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N)  where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1),
                                           arg1,
                                           build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      fold_convert_loc (loc, type, mask));
            }
        }
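
      /* E.g. 13 % 8 == 5 == (13 & 7).  The positivity requirement above
         is essential: with truncating division, -3 % 8 == -3, whereas
         -3 & 7 == 5.  */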
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
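
      /* With C99 truncating division the sign of the divisor never
         affects the result: e.g. 7 % -3 == 7 % 3 == 1.  The sign_bit_p
         check excludes C == INT_MIN, whose negation would overflow.  */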
      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type,
                                             build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                  build_int_cst (type, low));
        }
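
      /* E.g. (x << 3) << 5 becomes x << 8 when 8 < TYPE_PRECISION (type);
         if the combined count reaches the precision, the result is the
         constant 0 for unsigned or left shifts, as handled above.  */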
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
          tree lshift;
          tree arg00;

          if (low0 == low1)
            {
              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }
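
      /* E.g. in a 32-bit type, a rotate left by 3 is rewritten as a
         rotate right by 32 - 3 == 29; both move every bit to the same
         position, so only the canonical RROTATE_EXPR form is kept.  */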
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 0), arg1),
                                fold_build2_loc (loc, code, type,
                                                 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                       fold_convert_loc (loc, type,
                                                         TREE_OPERAND (arg0, 1)),
                                       arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)),
                                        arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;
      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.   Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                    fold_build2_loc (loc, code, type,
                                                     a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                    fold_build2_loc (loc, code, type,
                                                     a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                    fold_build2_loc (loc, code, type,
                                                     a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                    fold_build2_loc (loc, code, type,
                                                     a00, a10),
                                    a01);
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (loc, code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                   TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                                fold_convert_loc (loc, type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                                fold_convert_loc (loc, type, arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (arg1))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg0),
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (arg0),
                                                                   arg1),
                                                 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                        code == NE_EXPR
                                        ? boolean_true_node
                                        : boolean_false_node,
                                        TREE_OPERAND (arg0, 1), arg1);
        }
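
      /* An odd C can never satisfy C - X == X: that would require
         C == 2*X, which is even modulo the type precision.  Hence EQ
         folds to false and NE to true, keeping both operands for their
         side effects.  */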
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
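
      /* This is safe because x and (unsigned) x are congruent modulo any
         power of two, so x % 4 == 0 exactly when the unsigned remainder
         is 0; the unsigned MOD can then become a simple mask.  */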
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
                                         arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                         arg000, tem);
                  return fold_build2_loc (loc, code, type, tem, arg1);
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR
                                             ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert_loc (loc, type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                                arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        integer_zero_node));
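
      /* E.g. (A & 8) == 8 becomes (A & 8) != 0, since A & 8 can only
         evaluate to 0 or 8 when 8 is a single bit.  */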
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                           arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                       arg1);
          tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                           TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_int_cst (itype, 0));
            }
        }
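
      /* E.g. for signed 32-bit X, (X >> 31) != 0 becomes X < 0: the
         arithmetic shift yields 0 for nonnegative X and -1 for negative
         X.  An unsigned X is converted to signed first so the sign test
         is meaningful.  */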
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                                build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                                fold_build2_loc (loc, BIT_XOR_EXPR,
                                                 TREE_TYPE (arg1),
                                                 TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem, arg1);
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, arg00,
                                  build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type, tem,
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                                TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg10),
                                                     arg01),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg00, arg11),
                                                     arg01),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg10),
                                                     arg00),
                                    build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg11),
                                                     arg00),
                                    build_int_cst (itype, 0));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00, arg10);
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00, arg11);
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01, arg10);
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01, arg11);

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_XOR_EXPR, itype,
                                                     arg00,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR, itype,
                                                                      arg01, arg11)),
                                    arg10);
        }
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }
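      /* Illustrative example: for COMPLEX_EXPR <a, 0.0> == COMPLEX_EXPR <b, 1.0>
         the imaginary halves fold to the constant comparison 0.0 == 1.0,
         i.e. false, so the whole equality folds to false regardless of a
         and b (omit_two_operands_loc still preserves their side effects).  */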
      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
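      /* Illustrative example: with a signed int i and strict-overflow
         semantics, the test  i + 10 > i  folds to 1 here (and
         -Wstrict-overflow can report the assumption), since it could only
         be false if i + 10 overflowed.  */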
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1), arg0),
                                          arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc, TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1), arg0),
                                          arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc, TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                            code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                            type,
                                            fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }
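      /* Illustrative examples, for an 8-bit unsigned operand x:
             x >  255  folds to 0          x >= 255  becomes x == 255
             x <= 255  folds to 1          x <  255  becomes x != 255
             x >  254  becomes x == 255    x <= 254  becomes x != 255
         with the symmetric rules at the minimum of the type.  */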
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
        }
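      /* Illustrative examples: abs(x) <= 7 becomes x >= -7 && x <= 7,
         abs(x) >= 0 folds to 1, and abs(x) < 0 folds to 0 (with a
         -Wstrict-overflow note whenever a signed-overflow assumption was
         needed to prove non-negativity).  */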
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        {
          tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                        build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                TREE_OPERAND (arg1, 1)),
                        build_int_cst (TREE_TYPE (arg0), 0));
          goto fold_binary_exit;
        }

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                        fold_convert_loc (loc, TREE_TYPE (arg0),
                                          build2 (RSHIFT_EXPR,
                                                  TREE_TYPE (arg0), arg0,
                                                  TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                                1))),
                        build_int_cst (TREE_TYPE (arg0), 0));
          goto fold_binary_exit;
        }
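      /* Illustrative example: for unsigned x, x < (1 << y) becomes
         (x >> y) == 0 and x >= (1 << y) becomes (x >> y) != 0, so the
         shifted constant 1 need not be materialized.  */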
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
 fold_binary_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
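/* Illustrative note: contains_label_p guards the COND_EXPR folding below.
   In GNU C a dead arm such as the statement expression in
       0 ? ({ L: f (); 0; }) : x
   may still carry the label L as the target of a goto from outside the
   expression, so an arm for which contains_label_p is true must not be
   thrown away even when the condition is a known constant.  */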
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          tem = fold_truth_not_expr (loc, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          invert_truthvalue_loc (loc,
                                                                                 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (tem),
                                                                   arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                op2);
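      /* Illustrative examples of the four rules above, for truth-valued
         a and b:  a ? b : 0 becomes a && b,  a ? b : 1 becomes !a || b,
         a ? 0 : b becomes !a && b,  and  a ? 1 : b becomes a || b.  */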
      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
          && type == TREE_TYPE (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (width != 0
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
              && (idx % width) == 0
              && (idx = idx / width)
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              tree elements = NULL_TREE;

              if (TREE_CODE (arg0) == VECTOR_CST)
                elements = TREE_VECTOR_CST_ELTS (arg0);
              else
                {
                  unsigned HOST_WIDE_INT idx;
                  tree value;

                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
                    elements = tree_cons (NULL_TREE, value, elements);
                }
              while (idx-- > 0 && elements)
                elements = TREE_CHAIN (elements);
              if (elements)
                return TREE_VALUE (elements);
              else
                return fold_convert_loc (loc, type, integer_zero_node);
            }
        }

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
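/* Illustrative note: the BIT_FIELD_REF case above lets a constant vector
   element extraction fold away, e.g. an element-sized, element-aligned
   reference selecting element 1 of the constant vector {1, 2, 3, 4}
   simply yields the constant 2.  */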
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }
  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);

        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
            unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
            unsigned HOST_WIDE_INT begin = 0;

            /* Find a matching index by means of a binary search.  */
            while (begin != end)
              {
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
                tree index = VEC_index (constructor_elt, elts, middle)->index;

                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_lt (index, op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == INTEGER_CST
                         && tree_int_cst_lt (op1, index))
                  end = middle;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
                  end = middle;
                else
                  return VEC_index (constructor_elt, elts, middle)->value;
              }
          }

        return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
               <= sizeof (struct tree_function_decl))
              && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    {
      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    {
      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    {
      tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
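/* Illustrative example: a static initializer such as
       static const double d = 1.0 / 3.0;
   must be evaluated at compile time even under -ftrapping-math or
   -frounding-math, so the wrappers above temporarily clear those flags
   (and set folding_initializer) around the ordinary fold_buildN call,
   then restore them.  */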
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom, 0));

    default:
      return 0;
    }
}
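/* Illustrative examples: multiple_of_p returns 1 for top = i * (j * 8)
   against bottom = 8 (one MULT_EXPR operand is itself a multiple of 8),
   and 0 whenever divisibility cannot be proved cheaply, e.g. for
   top = i + 4 against bottom = 8, since a PLUS_EXPR needs both operands
   to be multiples.  */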
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n,
                                   n < 0 ? -1 : 0, 0);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
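
/* Illustrative sketch, not part of the original file: how a caller
   unpacks a CALL_EXPR into the pieces the predicate above expects
   (tree_invalid_nonnegative_warnv_p below does the same unpacking).
   The example_* name is invented for illustration.  */

static bool
example_call_nonnegative_p (tree call)
{
  bool strict_overflow_p = false;
  tree arg0 = call_expr_nargs (call) > 0 ? CALL_EXPR_ARG (call, 0) : NULL_TREE;
  tree arg1 = call_expr_nargs (call) > 1 ? CALL_EXPR_ARG (call, 1) : NULL_TREE;

  /* For, say, __builtin_fmax (x, 42.0) this returns true: the FMAX
     case only needs one of the two arguments to be non-negative.  */
  return tree_call_nonnegative_warnv_p (TREE_TYPE (call),
                                        get_callee_fndecl (call),
                                        arg0, arg1, &strict_overflow_p);
}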
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
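
/* Illustrative sketch, not part of the original file: the _warnv_p/_p
   pair above is the recurring pattern in this file -- the _warnv_p
   variant reports through *STRICT_OVERFLOW_P, while the _p wrapper
   emits the -Wstrict-overflow warning itself.  A hypothetical
   simplification of ABS_EXPR could rely on the wrapper like this
   (the example_* name is invented for illustration).  */

static tree
example_fold_abs_expr (tree arg)
{
  /* abs (x) == x whenever x is known to be non-negative; any
     strict-overflow diagnostic is issued inside the wrapper.  */
  if (tree_expr_nonnegative_p (arg))
    return arg;
  return NULL_TREE;
}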
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
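
/* Illustrative sketch, not part of the original file: the MULT_EXPR arm
   above is the one place that sets *STRICT_OVERFLOW_P itself -- x * y
   with x != 0 and y != 0 is only known nonzero when signed overflow is
   undefined.  A caller that unpacks a binary node might look like this
   (the example_* name is invented for illustration).  */

static bool
example_binary_nonzero_p (tree expr, bool *strict_overflow_p)
{
  return tree_binary_nonzero_warnv_p (TREE_CODE (expr), TREE_TYPE (expr),
                                      TREE_OPERAND (expr, 0),
                                      TREE_OPERAND (expr, 1),
                                      strict_overflow_p);
}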
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
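
/* Illustrative sketch, not part of the original file: a typical client
   of tree_expr_nonzero_p is a pass that wants to discard a pointer
   null test outright (the example_* name is invented for
   illustration).  */

static bool
example_null_test_is_redundant (tree ptr)
{
  /* True for, e.g., an ADDR_EXPR of a non-weak decl under
     -fdelete-null-pointer-checks, per tree_single_nonzero_warnv_p.  */
  return tree_expr_nonzero_p (ptr);
}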
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
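
/* Illustrative sketch, not part of the original file: folding 2 + 3
   down to the INTEGER_CST 5.  With a non-constant operand the helper
   returns NULL_TREE rather than a partially folded tree (the
   example_* name is invented for illustration).  */

static tree
example_fold_constant_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}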
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
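
/* Illustrative sketch, not part of the original file: for the tree
   representing "abc"[1] the routine above yields the character
   constant 'b'; a caller typically tries it before emitting a real
   memory load (the example_* name is invented for illustration).  */

static tree
example_load_string_element (tree ref)
{
  tree val = fold_read_from_constant_string (ref);
  return val ? val : ref;  /* Fall back to the original reference.  */
}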
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = force_fit_type_double (type, low, high, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = force_fit_type_double (type, low, high, -1,
                                     overflow | TREE_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
                             ~TREE_INT_CST_HIGH (arg0), 0,
                             TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
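
/* Illustrative sketch, not part of the original file: 2 > 3 folds to
   boolean false.  GT_EXPR is not handled directly; per the comment
   above it is first swapped into an LT_EXPR (the example_* name is
   invented for illustration).  */

static tree
example_fold_constant_compare (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return fold_relational_const (GT_EXPR, boolean_type_node, two, three);
}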
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return has no side effects, or whether the right hand side of the
     modify expression inside the return has none.  If either is the case,
     we don't need to wrap the expression in a cleanup point expression.
     Note we don't check the left hand side of the modify because it
     should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
          SET_EXPR_LOCATION (op0, loc);
          return op0;
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          HOST_WIDE_INT offset = tree_low_cst (op01, 0);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);

          if (offset / part_widthi
              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
            return fold_build3_loc (loc,
                                    BIT_FIELD_REF, type,
                                    TREE_OPERAND (op00, 0),
                                    part_width, index);
        }
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1_loc (loc, IMAGPART_EXPR, type,
                                    TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
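
/* Illustrative sketch, not part of the original file: for an unused
   value such as (x++, y * z), the loop above peels away the pure
   y * z and keeps only the side-effecting x++ (the example_* name is
   invented for illustration).  */

static tree
example_discard_unused_value (tree expr)
{
  tree kept = fold_ignored_result (expr);
  /* integer_zero_node means nothing needs to be evaluated at all.  */
  return kept == integer_zero_node ? NULL_TREE : kept;
}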
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
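
/* Illustrative sketch, not part of the original file: with a
   power-of-two divisor the function reduces to bit manipulation,
   (VALUE + DIVISOR-1) & -DIVISOR, so rounding 13 up to a multiple of
   8 gives (13 + 7) & -8 == 16 (the example_* name is invented for
   illustration).  */

static tree
example_round_size_up (location_t loc, tree size)
{
  return round_up_loc (loc, size, 8);
}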
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
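
/* Illustrative sketch, not part of the original file: the rounding-down
   counterpart is just VALUE & -DIVISOR for powers of two, e.g.
   13 & -8 == 8 (the example_* name is invented for illustration).  */

static tree
example_round_size_down (location_t loc, tree size)
{
  return round_down_loc (loc, size, 8);
}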
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
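
/* Illustrative sketch, not part of the original file: for e1 = &a[3]
   and e2 = &a[1] the two cores compare equal and *DIFF receives
   2 * sizeof (a[0]).  A client checking for adjacent byte addresses
   might write (the example_* name is invented for illustration):  */

static bool
example_addresses_adjacent_p (tree e1, tree e2)
{
  HOST_WIDE_INT diff;
  return ptr_difference_const (e1, e2, &diff) && diff == 1;
}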
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
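
/* Illustrative sketch, not part of the original file: a caller that
   only cares about |EXP| -- say, when folding a comparison against
   zero -- can discard sign operations first; tan (-x) becomes tan (x)
   through the negate_mathfn_p arm above (the example_* name is
   invented for illustration).  */

static tree
example_canonicalize_magnitude (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  return stripped ? stripped : exp;
}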