/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
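
/* With this encoding, combining two comparisons reduces to bitwise
   operations on the codes: one bit each for "less", "equal",
   "greater" and "unordered".  For example,

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  == 3
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) == 13

   so (a < b) || (a == b) folds to the single test encoded by 3.  */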
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (const_tree, const_tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
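
/* For example, using 8-bit values for readability: 0x60 + 0x60 = 0xC0
   adds two positive numbers but produces a negative result, so
   ~(a ^ b) has the sign bit set (the addends' signs agree) and
   (a ^ sum) has the sign bit set (the sign changed), making the
   macro yield nonzero.  */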
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
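
/* A minimal usage sketch: encode and decode are inverses for any
   two-word value, whatever the host word size:

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (words, 0xdeadbeef, 7);
     decode (words, &lo, &hi);    // lo == 0xdeadbeef, hi == 7
*/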
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
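
/* Usage sketch: forcing 0x1ff into an 8-bit unsigned type truncates
   it to 0xff and reports overflow, since argument and result differ:

     unsigned HOST_WIDE_INT lv;
     HOST_WIDE_INT hv;
     int ovf = fit_double_type (0x1ff, 0, &lv, &hv,
                                unsigned_char_type_node);
     // lv == 0xff, hv == 0, ovf != 0
*/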
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
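
/* The carry out of the low word is detected portably via (l < l1):
   unsigned addition wraps, so the low-word sum is smaller than an
   addend exactly when a carry occurred.  E.g. on a 64-bit host,
   adding 1 to the doubleword high:low == 0:0xffffffffffffffff gives
   l == 0 < 1, so 1 is carried into the high word.  */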
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
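
/* Sketch of the ARITH distinction, assuming a 64-bit host and
   prec == 128; shifting the value h1:l1 == -1:0 (i.e. -2^64) right
   by 1:

     rshift_double (0, -1, 1, 128, &lv, &hv, 1);  // arithmetic
     // hv == -1, lv == 0x8000000000000000   (value -2^63)
     rshift_double (0, -1, 1, 128, &lv, &hv, 0);  // logical
     // hv == 0x7fffffffffffffff, lv == 0x8000000000000000
*/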
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order divisor digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
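
/* The rounding variants differ only in the final adjustment; e.g. for
   -7 / 2 (exact ratio -3.5):

     TRUNC_DIV_EXPR  -> -3   (round toward zero)
     FLOOR_DIV_EXPR  -> -4   (round toward negative infinity)
     CEIL_DIV_EXPR   -> -3   (round toward positive infinity)
     ROUND_DIV_EXPR  -> -4   (round to closest; the tie -3.5 goes
                              away from zero)

   and the remainder is recomputed afterward so that
   num == quo * den + rem always holds.  */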
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
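
/* Typical usage sketch (USED_P, STMT and the operands stand for
   caller state, as in the loop-iteration estimators):

     fold_defer_overflow_warnings ();
     t = fold_build2 (PLUS_EXPR, type, a, b);
     ...decide whether the folded result is actually used...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   so a strict-overflow warning is only emitted if the caller commits
   to the folded expression.  */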
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
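
/* For example, with a 32-bit int the only constant that cannot be
   negated is INT_MIN (-0x80000000): its low PREC bits are exactly a
   1 followed by PREC-1 zeros, the pattern the final test rejects.  */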
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
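
/* Usage sketch: splitting IN == a - 4 with CODE == PLUS_EXPR and
   NEGATE_P == 0 yields

     *litp == NULL, *minus_litp == 4, *conp == NULL,
     return value == a

   i.e. the subtracted literal is reported through *MINUS_LITP rather
   than being negated.  */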
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
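
/* Usage sketch:

     tree t = int_const_binop (PLUS_EXPR,
                               build_int_cst (integer_type_node, 2),
                               build_int_cst (integer_type_node, 3),
                               0);
     // t is the INTEGER_CST 5 of type int

   Division by a zero constant is caught by the ROUND_DIV_EXPR /
   ROUND_MOD_EXPR checks above and returns NULL_TREE.  */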
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
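
/* For MULT_EXPR and RDIV_EXPR the complex case above folds with the
   textbook identities

     (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i
     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   computing the magnitude squared of the divisor once and dividing
   both parts by it.  */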
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
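
/* Usage sketch, e.g. when computing a structure layout offset:

     tree off = size_binop (PLUS_EXPR, size_int (4), size_int (8));
     // off is the sizetype constant 12

   size_int is the usual way to obtain sizetype constants; both
   operands must already agree per int_binop_types_match_p.  */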
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting a pointer  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
                                  const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      low = 0;
      high = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
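/* Under these rules, e.g., folding (int) __builtin_nan ("") produces 0,
   and folding (int) 1e30 produces INT_MAX (the TYPE_MAX_VALUE bound
   tested above); in both cases the resulting constant carries the
   TREE_OVERFLOW flag.  */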
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
                     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
                     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
                     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
                     &temp_trunc.low, &temp_trunc.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not all zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (temp.high < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
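/* For example, a signed fixed-point -3.5 converts to the integer -3:
   the arithmetic right shift by fbit yields -4, shifting back does not
   reproduce the original value (the fractional bits were nonzero), so
   1 is added to round the quotient toward zero.  */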
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;

  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;

  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
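/* E.g., a NOP_EXPR conversion of the INTEGER_CST 300 to an 8-bit unsigned
   type dispatches to fold_convert_const_int_from_int above, which
   truncates the value to 44 (300 mod 256).  */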
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
        return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;

  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
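/* For example, !(a < b) becomes a >= b when NaNs need not be honored,
   but must become a UNGE b (unordered or greater-equal) when they are;
   and with both NaNs and trapping math in effect, no inversion is
   attempted at all.  */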
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR: case ORDERED_EXPR: case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR:   return LT_EXPR;
    case GE_EXPR:   return LE_EXPR;
    case LT_EXPR:   return GT_EXPR;
    case LE_EXPR:   return GE_EXPR;
    case UNGT_EXPR: return UNLT_EXPR;
    case UNGE_EXPR: return UNLE_EXPR;
    case UNLT_EXPR: return UNGT_EXPR;
    case UNLE_EXPR: return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:        return COMPCODE_LT;
    case EQ_EXPR:        return COMPCODE_EQ;
    case LE_EXPR:        return COMPCODE_LE;
    case GT_EXPR:        return COMPCODE_GT;
    case NE_EXPR:        return COMPCODE_NE;
    case GE_EXPR:        return COMPCODE_GE;
    case ORDERED_EXPR:   return COMPCODE_ORD;
    case UNORDERED_EXPR: return COMPCODE_UNORD;
    case UNLT_EXPR:      return COMPCODE_UNLT;
    case UNEQ_EXPR:      return COMPCODE_UNEQ;
    case UNLE_EXPR:      return COMPCODE_UNLE;
    case UNGT_EXPR:      return COMPCODE_UNGT;
    case LTGT_EXPR:      return COMPCODE_LTGT;
    case UNGE_EXPR:      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:    return LT_EXPR;
    case COMPCODE_EQ:    return EQ_EXPR;
    case COMPCODE_LE:    return LE_EXPR;
    case COMPCODE_GT:    return GT_EXPR;
    case COMPCODE_NE:    return NE_EXPR;
    case COMPCODE_GE:    return GE_EXPR;
    case COMPCODE_ORD:   return ORDERED_EXPR;
    case COMPCODE_UNORD: return UNORDERED_EXPR;
    case COMPCODE_UNLT:  return UNLT_EXPR;
    case COMPCODE_UNEQ:  return UNEQ_EXPR;
    case COMPCODE_UNLE:  return UNLE_EXPR;
    case COMPCODE_UNGT:  return UNGT_EXPR;
    case COMPCODE_LTGT:  return LTGT_EXPR;
    case COMPCODE_UNGE:  return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
}
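/* For example, when folding (a < b) || (a == b), ORing the compcodes for
   LT and EQ yields the compcode for LE, so the combined comparison is
   a <= b.  ANDing them instead yields COMPCODE_FALSE, i.e. the AND of
   the two tests folds to constant false.  */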
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both must be;
   otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
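/* Under this scheme, e.g., a + b and b + a compare equal through the
   commutative-operator case above, and a < b matches b > a through the
   swapped-comparison check at the top of the function.  */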
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
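/* For example, (a == b) & (a < b) satisfies this predicate: both
   comparisons mention only A and B, so *CVAL1 and *CVAL2 become A and B.
   (a < b) | (c < d) does not, since it would need four values.  */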
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
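/* For example, eval_subst on (a < b) && (a == c) with OLD0/NEW0 == a/x
   and OLD1/NEW1 == b/y rebuilds the tree as (x < y) && (x == c).  */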
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
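/* For example, when folding f () * 0 the caller passes RESULT == 0 and
   OMITTED == the call f (); since the call has side effects, the fold
   becomes the COMPOUND_EXPR (f (), 0) rather than plain 0.  */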
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code,
                                     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2 (code, type,
                     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);
        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                       VOID_TYPE_P (TREE_TYPE (arg1))
                       ? arg1 : invert_truthvalue (arg1),
                       VOID_TYPE_P (TREE_TYPE (arg2))
                       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1 (TRUTH_NOT_EXPR, type, arg);

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}
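/* With constants this saves two operations, e.g. (X | 3) & (X | 5)
   becomes X | (3 & 5), which immediately folds further to X | 1.  */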
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
                        fold_build2 (code, type,
                                     TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0)),
                        TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_real (type, r0));
    }

  return NULL_TREE;
}
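/* E.g., a/d + b/d becomes (a + b)/d here.  This can change rounding and
   overflow behavior for floating point, which is why the function is
   documented as unsafe and is expected to be guarded by the unsafe-math
   flags in callers.  */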
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2 (code, compare_type,
                        fold_build2 (BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (linner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask),
                        fold_build2 (BIT_AND_EXPR, unsigned_type,
                                     make_bit_field_ref (rinner,
                                                         unsigned_type,
                                                         nbitsize, nbitpos,
                                                         1),
                                     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert (unsigned_type, rhs),
                                  size_int (lbitpos), 0),
                     mask, 0);

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
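/* In outline: for a 3-bit field F stored at bit offset 2 of a word W, a
   test such as F == 5 is rewritten as (W & (7 << 2)) == (5 << 2): the
   containing chunk is masked and compared directly, avoiding the shift
   that a plain bit-field extraction would need.  */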
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
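/* For a 32-bit type, e.g., sign_bit_p recognizes a VAL whose low 32 bits
   are 0x80000000: only the bit patterns are compared (masked to the
   type's width), so the signed value INT_MIN and the unsigned value
   0x80000000u both qualify.  */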
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
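/* E.g., range_binop (LE_EXPR, type, NULL_TREE, 0, NULL_TREE, 1) asks
   whether a missing lower bound is <= a missing upper bound: SGN0 is -1
   and SGN1 is 1, so the result is always true.  */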
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
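
/* A worked example of the loop above: for EXP = X + 10 < 30 with X
   signed, the comparison case records the range "- [30, -]" (not
   greater-or-equal to 30), and the PLUS_EXPR case then moves the
   constant across the bound, giving X the range "- [20, -]", i.e.
   simply X < 20.  */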
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = signed_type_for (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert (sizetype, low);
	  low = fold_build1 (NEGATE_EXPR, sizetype, low);
	  return build_range_check (type,
				    fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
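
/* For instance, build_range_check (type, X, 1, 2, 5) subtracts the low
   bound to recurse as build_range_check (type, X - 2, 1, 0, 3); the
   integer_zerop (low) case then switches to the unsigned type, and the
   final recursion emits the single test (unsigned) (X - 2) <= 3.  */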
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
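
/* Note the infinite cases: range_successor on the maximum value of the
   type (and range_predecessor on the minimum) returns 0, meaning "no
   such value"; callers such as merge_ranges treat that as a reason to
   punt rather than wrap around.  */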
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
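
/* A worked example: merging "+ [2, 5]" with "+ [4, 9]" (both in_p set)
   finds overlap but no subset, so the in0_p && in1_p case yields
   "+ [4, 5]" -- from the start of the second range to the end of the
   first.  */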
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
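
/* For example: "x > 0 ? x : -x" matches the A op 0 ? A : -A pattern
   above with comp_code GT_EXPR and folds to ABS_EXPR <x>, while
   "x < y ? x : y" matches A op B ? A : B with LT_EXPR and becomes
   MIN_EXPR <x, y> when NaNs need not be honored.  */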
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
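
/* A worked example: for "ch >= '0' && ch <= '9'", make_range yields
   "+ ['0', -]" and "+ [-, '9']"; merge_ranges combines them into
   "+ ['0', '9']", and build_range_check then produces the single
   comparison (unsigned) (ch - '0') <= 9.  */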
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
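
/* A numeric illustration (8-bit mode, p == 4, MASK zero): for
   C == 0xff, the sign-extension of the 4-bit value -1, unextend
   returns 0x0f -- the extra bits come out zero -- whereas for
   C == 0x0f, which is not sign-extended, it returns 0xff.  */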
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
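
/* A worked example: for adjacent bit-fields, "p->a == 2 && p->b == 4"
   is folded into a single load of a word spanning both fields, masked
   with the IOR of the two shifted field masks and compared against the
   IOR of the two shifted constants -- one load and one comparison
   instead of two of each.  */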
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
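
/* For example: "MIN (X, 10) < 5" goes through the LT_EXPR case, which
   inverts to GE_EXPR, splits into EQ_EXPR || GT_EXPR, and folds back
   to "X < 5", since the bound 10 cannot affect a comparison against
   the smaller constant 5.  */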
5960 /* T is an integer expression that is being multiplied, divided, or taken a
5961 modulus (CODE says which and what kind of divide or modulus) by a
5962 constant C. See if we can eliminate that operation by folding it with
5963 other operations already in T. WIDE_TYPE, if non-null, is a type that
5964 should be used for the computation if wider than our type.
5966 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5967 (X * 2) + (Y * 4). We must, however, be assured that either the original
5968 expression would not overflow or that overflow is undefined for the type
5969 in the language in question.
5971 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5972 the machine has a multiply-accumulate insn or that this is part of an
5973 addressing calculation.
5975 If we return a non-null expression, it is an equivalent form of the
5976 original computation, but need not be in the original type.
5978 We set *STRICT_OVERFLOW_P to true if the return values depends on
5979 signed overflow being undefined. Otherwise we do not change
5980 *STRICT_OVERFLOW_P. */
5983 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5984 bool *strict_overflow_p
)
5986 /* To avoid exponential search depth, refuse to allow recursion past
5987 three levels. Beyond that (1) it's highly unlikely that we'll find
5988 something interesting and (2) we've probably processed it before
5989 when we built the inner expression. */
5998 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6005 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6006 bool *strict_overflow_p
)
6008 tree type
= TREE_TYPE (t
);
6009 enum tree_code tcode
= TREE_CODE (t
);
6010 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
6011 > GET_MODE_SIZE (TYPE_MODE (type
)))
6012 ? wide_type
: type
);
6014 int same_p
= tcode
== code
;
6015 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6016 bool sub_strict_overflow_p
;
6018 /* Don't deal with constants of zero here; they confuse the code below. */
6019 if (integer_zerop (c
))
6022 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6023 op0
= TREE_OPERAND (t
, 0);
6025 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6026 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6028 /* Note that we need not handle conditional operations here since fold
6029 already handles those cases. So just do arithmetic here. */
6033 /* For a constant, we can always simplify if we are a multiply
6034 or (for divide and modulus) if it is a multiple of our constant. */
6035 if (code
== MULT_EXPR
6036 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
6037 return const_binop (code
, fold_convert (ctype
, t
),
6038 fold_convert (ctype
, c
), 0);
6041 case CONVERT_EXPR
: case NON_LVALUE_EXPR
: case NOP_EXPR
:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0
)
6044 || UNARY_CLASS_P (op0
)
6045 || BINARY_CLASS_P (op0
)
6046 || VL_EXP_CLASS_P (op0
)
6047 || EXPRESSION_CLASS_P (op0
))
6048 /* ... and is unsigned, and its type is smaller than ctype,
6049 then we cannot pass through as widening. */
6050 && ((TYPE_UNSIGNED (TREE_TYPE (op0
))
6051 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
6052 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
6053 && (GET_MODE_SIZE (TYPE_MODE (ctype
))
6054 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
)))))
6055 /* ... or this is a truncation (t is narrower than op0),
6056 then we cannot pass through this narrowing. */
6057 || (GET_MODE_SIZE (TYPE_MODE (type
))
6058 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
))))
6059 /* ... or signedness changes for division or modulus,
6060 then we cannot pass through this conversion. */
6061 || (code
!= MULT_EXPR
6062 && (TYPE_UNSIGNED (ctype
)
6063 != TYPE_UNSIGNED (TREE_TYPE (op0
))))))
6066 /* Pass the constant down and see if we can make a simplification. If
6067 we can, replace this expression with the inner simplification for
6068 possible later conversion to our or some other type. */
6069 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6070 && TREE_CODE (t2
) == INTEGER_CST
6071 && !TREE_OVERFLOW (t2
)
6072 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6074 ? ctype
: NULL_TREE
,
6075 strict_overflow_p
))))
6080 /* If widening the type changes it from signed to unsigned, then we
6081 must avoid building ABS_EXPR itself as unsigned. */
6082 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6084 tree cstype
= (*signed_type_for
) (ctype
);
6085 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6088 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6089 return fold_convert (ctype
, t1
);
6095 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6097 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6100 case MIN_EXPR
: case MAX_EXPR
:
6101 /* If widening the type changes the signedness, then we can't perform
6102 this optimization as that changes the result. */
6103 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6106 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6107 sub_strict_overflow_p
= false;
6108 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6109 &sub_strict_overflow_p
)) != 0
6110 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6111 &sub_strict_overflow_p
)) != 0)
6113 if (tree_int_cst_sgn (c
) < 0)
6114 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6115 if (sub_strict_overflow_p
)
6116 *strict_overflow_p
= true;
6117 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6118 fold_convert (ctype
, t2
));
6122 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6123 /* If the second operand is constant, this is a multiplication
6124 or floor division, by a power of two, so we can treat it that
6125 way unless the multiplier or divisor overflows. Signed
6126 left-shift overflow is implementation-defined rather than
6127 undefined in C90, so do not convert signed left shift into
6129 if (TREE_CODE (op1
) == INTEGER_CST
6130 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6131 /* const_binop may not detect overflow correctly,
6132 so check for it explicitly here. */
6133 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
6134 && TREE_INT_CST_HIGH (op1
) == 0
6135 && 0 != (t1
= fold_convert (ctype
,
6136 const_binop (LSHIFT_EXPR
,
6139 && !TREE_OVERFLOW (t1
))
6140 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6141 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6142 ctype
, fold_convert (ctype
, op0
), t1
),
6143 c
, code
, wide_type
, strict_overflow_p
);
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ... */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && !TREE_OVERFLOW (t1))
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c, 0)));
            }
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1, 0)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
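
/* A worked example of the "cancel" case above (illustrative only):
   folding (X * 4) / 2 calls extract_muldiv on X * 4 with C == 2 and
   CODE == TRUNC_DIV_EXPR; since 4 is a multiple of 2, the result is
   X * (4 / 2), i.e. X * 2, provided overflow is undefined in CTYPE
   so that the narrowed multiplication is safe.  */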
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     ARG in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2 (code, type, true_value, arg);
      else
        lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2 (code, type, false_value, arg);
      else
        rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
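
/* For instance (illustrative only): with a constant ARG of 1, the
   expression 1 + (b ? 3 : 5) is rewritten as b ? (1 + 3) : (1 + 5),
   and each arm then constant-folds, giving b ? 4 : 6.  */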
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
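
/* Concretely (illustrative only): X - 0.0 may fold to X whenever we
   need not honor rounding towards -infinity, but X + 0.0 cannot fold
   when X may be -0.0, because -0.0 + 0.0 yields +0.0 under
   round-to-nearest.  */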
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
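
/* For example (illustrative only): sqrt(x) > 2.0 becomes x > 4.0 by
   squaring the constant, and sqrt(x) < -1.0 folds directly to false,
   since sqrt never returns a negative value.  */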
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
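
/* For example (illustrative only): for IEEE double, x < +Inf is
   rewritten as x <= DBL_MAX and x == +Inf as x > DBL_MAX, so no
   explicit infinity constant is needed in the folded form.  */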
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
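
/* A worked example (illustrative only): for signed int x, the
   comparison x / 4 == 10 becomes the range check 40 <= x && x <= 43,
   since C division truncates towards zero; with a negative divisor,
   x / -4 == 10 instead checks -43 <= x && x <= -40.  */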
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
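
/* For instance (illustrative only): with a 32-bit int x, the test
   (x & 0x80000000) != 0 masks exactly the sign bit and is folded to
   x < 0; the == 0 form becomes x >= 0.  */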
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
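
/* For example (illustrative only): (x & 8) != 0 is rewritten as
   ((unsigned) x >> 3) & 1, while (x & 8) == 0 additionally XORs the
   shifted bit with 1 so the AND can still be performed last.  */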
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);

    default:
      break;
    }

  return NULL_TREE;
}
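
/* For example (illustrative only): if s has type signed char, the
   comparison (int) s == 1000 is known to be false because 1000 does
   not fit in [-128, 127], so it folds to constant 0 while still
   evaluating s for side effects via omit_one_operand.  */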
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (PLUS_EXPR, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
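
/* A worked example (illustrative only): with int a[16] (element size
   4), the address &a[1] p+ 4 * d is rewritten as &a[1 + d]; when the
   offset is the already-folded constant 8 rather than a
   multiplication, it is recognized as 2 * element-size and gives
   &a[1 + 2] instead.  */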
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert (ssizetype, a);
      tree ta1 = fold_convert (ssizetype, a1);
      diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary (MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }

  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
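
/* Two worked instances (illustrative only): x*4 + x*12 has an
   identical multiplicand and folds to (4 + 12)*x, i.e. x*16 after
   constant folding; x*6 + y*2 has no common operand, but 2 is a
   power of two dividing 6, so it becomes (x*3 + y) * 2.  */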
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
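
/* Byte-order example (illustrative only, assuming a word size of at
   least four bytes): encoding the 32-bit INTEGER_CST 0x01020304 on a
   little-endian target stores the bytes 04 03 02 01 at ptr[0..3]; on
   a big-endian target it stores 01 02 03 04 instead.  */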
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = 32 / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = 32 / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
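
/* For example (illustrative only): on an IEEE target,
   VIEW_CONVERT_EXPR<int>(1.0f) is folded at compile time by encoding
   the REAL_CST 1.0f into the buffer and re-interpreting those four
   bytes as an INTEGER_CST, giving 0x3f800000.  */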
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  When IN_FOLD is true
   avoid modifications of T.  */

static tree
build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else if (!in_fold)
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }
  else
    t = build1 (ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T with type PTRTYPE.  This
   function modifies the input parameter 'T' by sometimes setting the
   TREE_ADDRESSABLE flag.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
}

/* Build an expression for the address of T.  This function modifies
   the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
   flag.  When called from fold functions, use fold_addr_expr instead.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type_1 (t,
                                           build_pointer_type (TREE_TYPE (t)),
                                           false);
}

/* Same as build_fold_addr_expr, builds an expression for the address
   of T, but avoids touching the input node 't'.  Fold functions
   should use this version.  */

static tree
fold_addr_expr (tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);
  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }
  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ, or
             - the initial type is a pointer to an array and the final type
               is not.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! (inside_ptr && final_ptr
                    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
                    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, fold_addr_expr (base));
        }
      if ((TREE_CODE (op0) == MODIFY_EXPR
           || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
          && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
                                           TREE_INT_CST_HIGH (and1), 0,
                                           TREE_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build2 (TREE_CODE (arg0), type,
                              fold_convert (type, arg00),
                              fold_convert (sizetype, arg01));
        }
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
	}
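      /* For example, for "unsigned int u;" the expression
	 (unsigned int) ~(int) u folds to ~u: both casts preserve the
	 precision, so the complement can be computed directly in the
	 result type.  */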
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
	return fold_convert (type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold_build1 (ABS_EXPR,
						    TREE_TYPE (targ0),
						    targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
	  return fold_build2 (COMPLEX_EXPR, type, rpart,
			      negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (op0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1 (NEGATE_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 0)))))
	return fold_build2 (BIT_XOR_EXPR, type, tem,
			    fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 1)))))
	return fold_build2 (BIT_XOR_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}
      return NULL_TREE;
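      /* The two's complement identities behind the BIT_NOT_EXPR folds
	 above, by example: with A = 7, ~(-7) is 6 == 7 - 1, and
	 ~(7 - 1) is -7, since ~X == -X - 1.  */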
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert (type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
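/* As an illustration of the REALPART_EXPR and IMAGPART_EXPR cases above:
   __real__ cexpi (x) becomes cos (x) and __imag__ cexpi (x) becomes
   sin (x), turning a complex builtin into a real one when only one
   part of the result is used.  */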
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
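/* For example, in MIN (MAX (a, b), b) the inner MAX (a, b) is always at
   least b, so the whole expression is just b; omit_one_operand keeps a
   alive only for its side effects.  */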
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
	  || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2 (code, type, t, arg1);
}
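/* For example, for signed x the comparison x - 3 < y is rewritten as
   x - 2 <= y (the constant shrinks in magnitude), while 5 <= y becomes
   4 < y and is then swapped into y > 4.  */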
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* In principle pointers also have undefined overflow behavior,
     but that causes problems elsewhere.  */
  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      || POINTER_TYPE_P (TREE_TYPE (arg0)))
    return NULL_TREE;

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand (type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand (type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C2 -+ C1"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type, variable, lhs);
	}
    }
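  /* For example, x + 20 < 10 becomes x < -10 when signed overflow is
     undefined; and x + 1 <= INT_MIN folds to false outright, because
     the recomputed constant overflows.  */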
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  /* We have to make sure to have an indirect/non-indirect base1
	     just the same as we did for base0.  */
	  if (TREE_CODE (base1) == INDIRECT_REF
	      && !indirect_base0)
	    base1 = TREE_OPERAND (base1, 0);
	  else if (!indirect_base0)
	    base1 = NULL_TREE;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}
      else if (indirect_base0)
	base1 = NULL_TREE;

      /* If we have equivalent bases we might be able to simplify.  */
      if (base0 && base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if (offset0 == offset1
	      || (offset0 && offset1
		  && operand_equal_p (offset0, offset1, 0)))
	    {
	      switch (code)
		{
		case EQ_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
		case NE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
		case LT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
		case LE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
		case GE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
		case GT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1)
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert (signed_size_type_node, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert (signed_size_type_node, offset1);

	      return fold_build2 (code, type, offset0, offset1);
	    }
	}
    }
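  /* For example, with "int a[10];" the test &a[2] != &a[3] has equal
     bases and differing constant offsets, so it folds to 1 at compile
     time; with equal constant parts the comparison reduces to one of
     the converted variable offsets themselves.  */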
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      variable1,
			      fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
					   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
					   variable1, cst),
			      variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
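  /* For example, x * 4 > 0 becomes x > 0, and x * -4 > 0 becomes x < 0;
     this is only valid because signed overflow is taken to be undefined
     here.  */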
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
	  || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2 (code, type,
			   eval_subst (arg0, cval1, maxval,
				       cval2, minval),
			   arg1);
	  tree equal_result
	    = fold_build2 (code, type,
			   eval_subst (arg0, cval1, maxval,
				       cval2, maxval),
			   arg1);
	  tree low_result
	    = fold_build2 (code, type,
			   eval_subst (arg0, cval1, minval,
				       cval2, maxval),
			   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }
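  /* For example, (a > b) == 0: evaluating it for a > b, a == b and
     a < b gives the 3-bit mask 011, which selects LE_EXPR, so the whole
     expression folds to a <= b.  */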
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	{
	  tree op0 = TREE_OPERAND (cref0, 0);
	  tree op1 = TREE_OPERAND (cref1, 0);
	  return fold_build2 (code, type,
			      fold_addr_expr (op0),
			      fold_addr_expr (op1));
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
			  fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
			  TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
			  TREE_OPERAND (arg0, 0),
			  fold_build1 (BIT_NOT_EXPR, cmp_type,
				       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
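/* For example, z = 3 + 4i gives z * conj (z) = (3*3 + 4*4) + 0i = 25;
   the imaginary part is exactly zero by construction, so it is emitted
   as a constant zero.  */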
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
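/* For example, if EXPR is &decl p+ 3 where DECL has 8-byte alignment,
   the result is M = 8 with *RESIDUE = 3 (the pointer is 3 modulo 8);
   for &decl p+ 4*i it is M = MIN (8, 4) = 4 with *RESIDUE = 0.  */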
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
	       || IS_GIMPLE_STMT_CODE_CLASS (kind))
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;
  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1, 0);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    TREE_OPERAND (arg0, 1), op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type,
				    op0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
						fold_convert (sizetype, arg1),
						fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2 (POINTER_PLUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2 (PLUS_EXPR, sizetype,
			       arg01, fold_convert (sizetype, arg1));
	  return fold_convert (type,
			       fold_build2 (POINTER_PLUS_EXPR,
					    TREE_TYPE (arg00), arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
	  if (tem)
	    return fold_convert (type, tem);
	}

      return NULL_TREE;
    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg0), arg0,
						fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg1), arg1,
						fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg0);
		}
	    }
	}
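      /* These are the usual two's complement identities, e.g. for A = 5:
	 ~5 is -6 and -6 + 1 is -5, so ~A + 1 == -A; and x + ~x has every
	 bit set, i.e. equals -1.  */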
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type, parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }
	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }

    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* With undefined overflow we can only associate constants
	     with one variable.  */
	  if ((POINTER_TYPE_P (type)
	       || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	      && var0 && var1)
	    {
	      tree tmp0 = var0;
	      tree tmp1 = var1;

	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
		tmp0 = TREE_OPERAND (tmp0, 0);
	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
		tmp1 = TREE_OPERAND (tmp1, 0);
	      /* The only case we can still associate with two variables
		 is if they are the same, modulo negation.  */
	      if (!operand_equal_p (tmp0, tmp1, 0))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
	      return fold_build2 (PLUS_EXPR, type,
				  fold_build2 (MINUS_EXPR, type, arg00, arg10),
				  fold_build2 (MINUS_EXPR, type, arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary (MINUS_EXPR, type, arg00,
				      fold_convert (type, arg1));
	      if (tmp)
		return fold_build2 (PLUS_EXPR, type, tmp, arg01);
	    }
	}
10030 /* A - (-B) -> A + B */
10031 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10032 return fold_build2 (PLUS_EXPR
, type
, op0
,
10033 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
10034 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10035 if (TREE_CODE (arg0
) == NEGATE_EXPR
10036 && (FLOAT_TYPE_P (type
)
10037 || INTEGRAL_TYPE_P (type
))
10038 && negate_expr_p (arg1
)
10039 && reorder_operands_p (arg0
, arg1
))
10040 return fold_build2 (MINUS_EXPR
, type
,
10041 fold_convert (type
, negate_expr (arg1
)),
10042 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
10043 /* Convert -A - 1 to ~A. */
10044 if (INTEGRAL_TYPE_P (type
)
10045 && TREE_CODE (arg0
) == NEGATE_EXPR
10046 && integer_onep (arg1
)
10047 && !TYPE_OVERFLOW_TRAPS (type
))
10048 return fold_build1 (BIT_NOT_EXPR
, type
,
10049 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
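      /* Illustration: in two's complement -A == ~A + 1, so -A - 1 == ~A;
	 e.g. for A = 5, -5 - 1 == -6 == ~5.  The !TYPE_OVERFLOW_TRAPS
	 guard presumably keeps us from deleting a trap: -A can overflow
	 for A == TYPE_MIN, but ~A never does.  */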
      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, op1);

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg10),
				      fold_convert (type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg11),
				      fold_convert (type, arg0));
		}
	    }
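	  /* Worked example: with A = 0b1101 and B = 0b1011, A & B = 0b1001
	     and A - (A & B) = 0b0100, which equals ~B & A.  The bits of
	     A & B are a subset of A's bits, so clearing them can never
	     borrow and the subtraction is exact.  */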
	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1 (NEGATE_EXPR, rtype,
					 arg1r ? arg1r
					 : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1 (NEGATE_EXPR, rtype,
					 arg1i ? arg1i
					 : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2 (PLUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2 (MULT_EXPR, type, diff,
				  fold_convert (type, esz));
	    }
	}
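      /* For instance, with int a[10], &a[7] - &a[3] becomes
	 (7 - 3) * sizeof (int): the subtraction here is still in bytes,
	 so the index difference is scaled by the element size, and the
	 division by the element size that the front end emits for
	 pointer subtraction can then cancel the product.  */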
      if (flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, negate_expr (arg0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert (type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2 (MULT_EXPR, type,
				fold_convert (type, negate_expr (arg0)), tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, op0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, op1,
				TREE_OPERAND (arg0, 1));
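	  /* E.g. a * (1 << 3) == a * 8 == a << 3: the shift form computes
	     the same value wherever the multiplication is defined, so the
	     multiply can be strength-reduced unconditionally here.  */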
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0,
					     fold_convert (type, arg1),
					     code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert (type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }
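	  /* E.g. (-y) * (-y) and fabs (y) * fabs (y) both equal y * y
	     exactly, even in IEEE arithmetic, so sign operations such as
	     negation on a squared operand can always be dropped.  */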
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return fold_build2 (COMPLEX_EXPR, type,
				    negate_expr (fold_build1 (IMAGPART_EXPR,
							      rtype, arg0)),
				    fold_build1 (REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return fold_build2 (COMPLEX_EXPR, type,
				    fold_build1 (IMAGPART_EXPR, rtype, arg0),
				    negate_expr (fold_build1 (REALPART_EXPR,
							      rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  return build_call_expr (rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  CALL_EXPR_ARG (arg0, 0),
					  CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr (expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      return build_call_expr (powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      return build_call_expr (powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr (powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr (powfn, 2, arg1, arg);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr (powfn, 2, arg0, arg);
		    }
		}
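	      /* Canonicalizing x*x to pow(x,2.0) is not a pessimization:
		 as the comment above notes, a pow with this small constant
		 exponent is expanded back into the explicit multiply,
		 while the pow form exposes the pow(...) folds above,
		 e.g. pow(x,2.0)*pow(x,3.0) -> pow(x,5.0).  */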
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
	  int width = TYPE_PRECISION (type);
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2 (BIT_IOR_EXPR, type,
				TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
	  hi1 &= mhi;
	  lo1 &= mlo;
	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
	    return fold_build2 (BIT_IOR_EXPR, type,
				fold_build2 (BIT_AND_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     build_int_cst_wide (type,
								 lo1 & ~lo2,
								 hi1 & ~hi2)),
				arg1);
	}
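      /* Worked example in 8 bits: (X & 0x0C) | 0x3C drops the AND
	 entirely, since 0x0C & 0x3C == 0x0C; and (X & 0xF3) | 0x0F
	 becomes (X & 0xF0) | 0x0F by clearing from C1 the bits that C2
	 already provides.  */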
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
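      /* E.g. (X & 0xF0) ^ (Y & 0x0F): the two masked values share no set
	 bits, so XOR and IOR agree bit for bit, and the IOR form is the
	 more foldable canonical choice.  */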
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_build1 (BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2 (EQ_EXPR, type, arg0,
			    build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
	  tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 0), tmp1);
	  tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 1), tmp1);
	  return fold_convert (type,
			       fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
					    tmp2, tmp3));
	}

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
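      /* E.g. for unsigned char c, (int)c lies in [0, 255], so
	 (int)c & 0377 (octal 0377 == 255) keeps every bit of the
	 widened value and the mask can be dropped.  */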
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      fold_convert (type,
						    TREE_OPERAND (arg0, 0)),
				      fold_convert (type,
						    TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue);

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
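      /* Illustration: if arg0 is known to be 4 bytes past an 8-byte
	 boundary (modulus == 8, residue == 4), then arg0 & 7 folds to
	 the constant 4.  The low < modulus guard ensures the mask only
	 queries bits that the alignment actually determines.  */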
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands (type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands (type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
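      /* E.g. x / 0.5 becomes x * 2.0 on this path: 0.5 has the exactly
	 representable inverse 2.0, so the product is bit-identical to
	 the quotient and the rewrite is safe even without
	 -freciprocal-math.  */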
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr (tanfn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr (cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr (cosfn, 1, arg00);
		      return fold_build2 (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr (powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2 (RDIV_EXPR, type, c, b);

		  tmp = build_call_expr (rootfn, 1, tmp);
		  return fold_build2 (MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      arg1 = build_call_expr (powfn, 2, arg10, neg11);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
				    sh_cnt, build_int_cst (NULL_TREE, pow2));
	      return fold_build2 (RSHIFT_EXPR, type,
				  fold_convert (type, arg0), sh_cnt);
	    }
	}
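      /* E.g. with B == 4 (log2(B) == 2), A / (4 << N) becomes
	 A >> (N + 2).  This holds for nonnegative A, where dividing by
	 a power of two and shifting right agree exactly; a negative A
	 would round toward zero under division but toward negative
	 infinity under the shift, hence the nonnegativity check.  */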
      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2 (code, type,
			      fold_convert (type, TREE_OPERAND (arg0, 0)),
			      negate_expr (arg1));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2 (code, type, negate_expr (arg0),
			      TREE_OPERAND (arg1, 0));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert (type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
				       build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, arg0),
				  fold_convert (type, mask));
	    }
	}
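      /* E.g. X % 16 with X >= 0 becomes X & 15, and X % (4 << N)
	 becomes X & ((4 << N) - 1): for a power-of-two modulus the
	 remainder is just the low-order bits.  */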
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulos"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert (type, tem);
	}

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return build_int_cst (type, 0);
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			      build_int_cst (type, low));
	}

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, op0, tem);
	}
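      /* E.g. in a 32-bit type, rotating left by 5 equals rotating right
	 by 32 - 5 = 27, so later passes only ever see one rotate
	 direction.  */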
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);
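      /* E.g. rotating a 32-bit value right by 10 and the result right by
	 another 22 moves every bit 32 places, i.e. back to where it
	 started, so the pair cancels.  */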
      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      return NULL_TREE;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      return NULL_TREE;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2 (code, type, arg0, tem);
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}
11605 /* See if we can build a range comparison. */
11606 if (0 != (tem
= fold_range_test (code
, type
, op0
, op1
)))
11609 /* Check for the possibility of merging component references. If our
11610 lhs is another similar operation, try to merge its rhs with our
11611 rhs. Then try to merge our lhs and rhs. */
11612 if (TREE_CODE (arg0
) == code
11613 && 0 != (tem
= fold_truthop (code
, type
,
11614 TREE_OPERAND (arg0
, 1), arg1
)))
11615 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
11617 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
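      /* Illustrative note (commentary added for exposition, not in the
	 original source): for a boolean B, "B ^ 1" folds to "!B" via the
	 inversion case above, and "B ^ B" folds to 0 via the
	 identical-argument case.  */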
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, arg0);

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, arg0);

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
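      /* Illustrative note (commentary added for exposition, not in the
	 original source): the four boolean rewrites above turn "b != 0"
	 and "b == 1" into "b", and "b != 1" and "b == 0" into "!b"; the
	 address case folds, e.g., "&x == &y" for two distinct non-weak
	 static variables to 0 at compile time.  */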
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert (TREE_TYPE (arg0), arg1),
				      TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert (TREE_TYPE (arg0), arg1),
					 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree cst = TREE_OPERAND (arg0, 1);

	  if (code == EQ_EXPR
	      && !integer_zerop (cst))
	    return omit_two_operands (type, boolean_false_node,
				      TREE_OPERAND (arg0, 0), arg1);
	  else
	    return omit_two_operands (type, boolean_true_node,
				      TREE_OPERAND (arg0, 0), arg1);
	}
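      /* Illustrative note (commentary added for exposition, not in the
	 original source): with the revised-constant rewrites above,
	 "x + 3 == 7" folds to "x == 4", and "(x ^ 5) == 3" folds to
	 "x == 6" since 5 ^ 3 == 6.  */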
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					   arg01, TREE_OPERAND (arg00, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					   arg00, TREE_OPERAND (arg01, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
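      /* Illustrative note (commentary added for exposition, not in the
	 original source): for signed x, "x % 4 == 0" is rewritten above as
	 "(unsigned) x % 4U == 0"; the two tests agree for every x, and the
	 unsigned form needs no sign correction.  */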
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2 (code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}
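      /* Illustrative note (commentary added for exposition, not in the
	 original source): for 32-bit x, "((x >> 3) & 4) != 0" takes the
	 shift-left branch above and becomes "(x & 32) != 0" (4 << 3 == 32
	 does not overflow), while "((x >> 31) & 2) != 0" on signed x takes
	 the sign-test branch and becomes "x < 0".  */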
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert (itype, arg00);
		}
	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  type, arg00, build_int_cst (itype, 0));
	    }
	}
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));
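      /* Illustrative note (commentary added for exposition, not in the
	 original source): the XOR rewrites above fold "(x ^ y) == 0" to
	 "x == y" and "(x ^ y) == y" to "x == 0"; both rely on XOR being
	 its own inverse.  */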
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg10),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg00, arg11),
					     arg01),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg10),
					     arg00),
				build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type,
				fold_build2 (BIT_AND_EXPR, itype,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11),
					     arg00),
				build_int_cst (itype, 0));
	}
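      /* Illustrative note (commentary added for exposition, not in the
	 original source): by the first rewrite above, "(x & 7) == (y & 7)"
	 becomes "((x ^ y) & 7) == 0", replacing two masks with one.  */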
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2 (code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2 (code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2 (code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2 (code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2 (code, type,
				fold_build2 (BIT_XOR_EXPR, itype, arg00,
					     fold_build2 (BIT_XOR_EXPR, itype,
							  arg01, arg11)),
				arg10);
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary (code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      imag0, imag1);
		  return fold_build2 (NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      imag0, imag1);
		  return fold_build2 (EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary (code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands (type, boolean_false_node,
					      real0, real1);
		  return fold_build2 (NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands (type, boolean_true_node,
					      real0, real1);
		  return fold_build2 (EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
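      /* Illustrative note (commentary added for exposition, not in the
	 original source): when signed overflow is undefined, "(x - 1) > x"
	 folds to 0 and "(x + 1) > x" folds to 1 above; each rewrite first
	 records a strict-overflow warning, since it assumes no
	 wraparound.  */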
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  if (code == GE_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	  if (code == LT_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	}
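      /* Illustrative note (commentary added for exposition, not in the
	 original source): the canonicalization above rewrites "x >= 5" as
	 "x > 4" and "x < 5" as "x <= 4", so the transformations that
	 follow only need to handle one boundary form.  */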
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && !TREE_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1), 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type,
				      fold_convert (TREE_TYPE (arg1), arg0),
				      arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }
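      /* Illustrative note (commentary added for exposition, not in the
	 original source): with an 8-bit unsigned operand, "x <= 255" folds
	 to 1 and "x > 255" folds to 0 above; "x > 254" becomes "x == 255",
	 and for 8-bit signed x, "x < -127" becomes "x == -128".  */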
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand (type, integer_zero_node, arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg1) == NOP_EXPR
	      || TREE_CODE (arg1) == CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;
), 0));
12647 case UNORDERED_EXPR
:
12655 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
12657 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
12658 if (t1
!= NULL_TREE
)
12662 /* If the first operand is NaN, the result is constant. */
12663 if (TREE_CODE (arg0
) == REAL_CST
12664 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
12665 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
12667 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
12668 ? integer_zero_node
12669 : integer_one_node
;
12670 return omit_one_operand (type
, t1
, arg1
);
12673 /* If the second operand is NaN, the result is constant. */
12674 if (TREE_CODE (arg1
) == REAL_CST
12675 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
12676 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
12678 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
12679 ? integer_zero_node
12680 : integer_one_node
;
12681 return omit_one_operand (type
, t1
, arg0
);
12684 /* Simplify unordered comparison of something with itself. */
12685 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
12686 && operand_equal_p (arg0
, arg1
, 0))
12687 return constant_boolean_node (1, type
);
12689 if (code
== LTGT_EXPR
12690 && !flag_trapping_math
12691 && operand_equal_p (arg0
, arg1
, 0))
12692 return constant_boolean_node (0, type
);
12694 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12696 tree targ0
= strip_float_extensions (arg0
);
12697 tree targ1
= strip_float_extensions (arg1
);
12698 tree newtype
= TREE_TYPE (targ0
);
12700 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
12701 newtype
= TREE_TYPE (targ1
);
12703 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
12704 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
12705 fold_convert (newtype
, targ1
));
12710 case COMPOUND_EXPR
:
12711 /* When pedantic, a compound expression can be neither an lvalue
12712 nor an integer constant expression. */
12713 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
12715 /* Don't let (0, 0) be null pointer constant. */
12716 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
12717 : fold_convert (type
, arg1
);
12718 return pedantic_non_lvalue (tem
);
12721 if ((TREE_CODE (arg0
) == REAL_CST
12722 && TREE_CODE (arg1
) == REAL_CST
)
12723 || (TREE_CODE (arg0
) == INTEGER_CST
12724 && TREE_CODE (arg1
) == INTEGER_CST
))
12725 return build_complex (type
, arg0
, arg1
);
12729 /* An ASSERT_EXPR should never be passed to fold_binary. */
12730 gcc_unreachable ();
12734 } /* switch (code) */
/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns false if no
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert (tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return fold_convert (type,
				 fold_build2 (BIT_AND_EXPR,
					      TREE_TYPE (tem), tem,
					      fold_convert (TREE_TYPE (tem),
							    arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;
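      /* Illustrative note (commentary added for exposition, not in the
	 original source): for truth values the rewrites above give
	 "a ? b : 0" -> "a && b", "a ? b : 1" -> "!a || b",
	 "a ? 0 : b" -> "!a && b", and "a ? 1 : b" -> "a || b".  */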
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
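
/* Editor's note -- an illustrative sketch, not part of GCC.  Assuming
   FNDECL is the FUNCTION_DECL of a foldable builtin and ARG a constant
   argument tree, a caller might write

     tree args[1] = { arg };
     tree call = fold_build_call_array (TREE_TYPE (TREE_TYPE (fndecl)),
					build_fold_addr_expr (fndecl),
					1, args);

   and receive a folded constant instead of a CALL_EXPR when
   fold_builtin_call_array succeeds.  */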
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;
  result = fold_build1 (code, type, op);
  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;
  result = fold_build2 (code, type, op0, op1);
  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;
  result = fold_build3 (code, type, op0, op1, op2);
  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer (tree type, tree fn,
				   int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;
  result = fold_build_call_array (type, fn, nargs, argarray);
  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
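
/* Editor's note -- an illustrative sketch, not part of GCC.  When
   flag_trapping_math is set, fold must keep a division such as

     tree x = fold_build2 (RDIV_EXPR, double_type_node, one, zero);

   unfolded (assuming ONE and ZERO are REAL_CST trees), because the
   run-time trap is observable.  In a static initializer there is no
   run time, so fold_build2_initializer clears the flags first and the
   same division folds to its IEEE result.  */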
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
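
/* Editor's note -- an illustrative sketch, not part of GCC.  A typical
   query, like the one made by round_up further below, checks
   divisibility of a size expression without requiring it to be
   constant:

     tree div = build_int_cst (sizetype, 8);
     if (multiple_of_p (sizetype, value, div))
       ...

   which succeeds for VALUE of the form X * 8 or X & -8.  */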
/* Return true if `t' is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (t == error_mark_node)
    return false;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total number of bits is shorter than the
	 result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if ((TREE_CODE (t) == MODIFY_EXPR
	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	    && GENERIC_TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SCALB):
	    CASE_FLT_FN (BUILT_IN_SCALBLN):
	    CASE_FLT_FN (BUILT_IN_SCALBN):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
		{
		  tree arg1 = CALL_EXPR_ARG (t, 1);
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      {
	tree type = TREE_TYPE (t);
	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
	    && truth_value_p (TREE_CODE (t)))
	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
	     have a signed:1 type (where the value is -1 and 0).  */
	  return true;
      }
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
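
/* Editor's note -- an illustrative sketch, not part of GCC.  For a
   signed X, the ABS_EXPR case above only proves

     tree_expr_nonnegative_p (fold_build1 (ABS_EXPR, integer_type_node, x))

   by assuming signed overflow is undefined (ABS_EXPR<INT_MIN> is
   INT_MIN), so the query also triggers the -Wstrict-overflow warning
   through fold_overflow_warning.  */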
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  bool sub_strict_overflow_p;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case INTEGER_CST:
      return !integer_zerop (t);

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					      strict_overflow_p));
      }

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
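
/* Editor's note -- an illustrative sketch, not part of GCC.  These
   wrappers let a caller insist on a constant result, e.g.

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 two, three);

   yields the INTEGER_CST 5, while non-constant operands yield
   NULL_TREE rather than a partially simplified tree.  */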
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion,
	     (ARRAY+(INDEX-(unsigned char)1)) becomes
	     ((ARRAY+(-(unsigned char)1))+INDEX), which becomes
	     (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
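
/* Editor's note -- an illustrative sketch, not part of GCC.  Given the
   tree for "abc"[1], the routine above returns an INTEGER_CST holding
   'b': the STRING_CST is constant, the index is in range, and the
   element mode is a one-byte integer mode.  */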
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
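
/* Editor's note -- an illustrative sketch, not part of GCC.  For
   REAL_CST operands 2.0 and NaN, LT_EXPR folds to false only when
   flag_trapping_math is clear; with trapping math the NULL_TREE
   return keeps the comparison (and its invalid-operand exception)
   for run time.  UNORDERED_EXPR on the same operands always folds
   to true.  */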
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the modify expression inside
     the return, has no side effects.  If either lacks side effects we
     don't need to wrap the expression in a cleanup point expression.
     Note we don't check the left-hand side of the modify because it
     should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type,
				TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}
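
/* Editor's note -- an illustrative sketch, not part of GCC.  For a
   declaration "int a[4];", calling build_fold_indirect_ref on the
   tree for (int *)&a simplifies *(int *)&a to a[0] via the ARRAY_TYPE
   case in fold_indirect_ref_1, instead of emitting an INDIRECT_REF.  */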
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
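
/* Editor's note -- a worked example, not part of GCC.  For a
   power-of-two divisor the two functions reduce to masking:

     round_up (value, 8)   computes (value + 7) & -8
     round_down (value, 8) computes value & -8

   so round_up (13, 8) gives (13 + 7) & ~7 = 16 and
   round_down (13, 8) gives 8.  */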
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
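
/* Editor's note -- an illustrative sketch, not part of GCC.  For the
   addresses &a[3] and &a[1] of an int array, both split to the same
   core, with constant byte offsets 12 and 4 when int is 4 bytes wide:

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       ...   at this point diff == 8

   whereas &a[i] versus &a[1] fails, because only one of the two
   offsets is non-constant.  */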
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}