/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "fixed-value.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
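/* A minimal sketch of the idea behind such an encoding (an
   illustration, not the definitive layout of this enum): give the LT,
   EQ and GT outcomes one bit each, and combining two comparisons with
   AND or OR reduces to bitwise arithmetic on the codes:

     LT = 1, EQ = 2, GT = 4
     LE = LT | EQ          "a <= b" is "a < b or a == b"
     NE = LT | GT
     (LE & GE) == EQ       folding "a <= b && a >= b" to "a == b"

   The comparison-combining folders declared below rely on exactly
   this kind of bit-level composition.  */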
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (const_tree, const_tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
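/* An illustration (not part of the original file): with 32-bit ints,
   0x7fffffff + 1 wraps to 0x80000000.  The operands agree in sign while
   the sum differs, so ~(a ^ b) & (a ^ sum) has its sign bit set and the
   macro yields nonzero.  A mixed-sign addition such as 5 + (-3) can
   never overflow, and for it the macro yields zero.  */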
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
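/* A round-trip sketch (an illustration, not part of the original file):
   on a host with 32-bit HOST_WIDE_INT, BASE is 1 << 16 and encode splits
   each word into two base-2^16 digits, least significant first.  */
#if 0	/* illustration only */
  HOST_WIDE_INT w[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (w, 0x12345678, 0x0abcdef0);	/* w = {0x5678, 0x1234, 0xdef0, 0x0abc} */
  decode (w, &lo, &hi);			/* recovers lo == 0x12345678, hi == 0x0abcdef0 */
#endif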
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		 const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
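/* An illustration (not part of the original file): forcing the value
   0x1ff into an 8-bit signed type clears the bits beyond bit 7, sign
   extends the remaining 0xff to -1, and reports overflow because the
   stored value no longer equals the argument.  */
#if 0	/* illustration only */
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = fit_double_type (0x1ff, 0, &lv, &hv, signed_char_type_node);
  /* lv == (unsigned HOST_WIDE_INT) -1, hv == -1, ovf != 0.  */
#endif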
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
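/* An illustration (not part of the original file): the carry out of the
   low word is exactly the test l < l1 after the wrapping addition.
   E.g. with 4-bit words, 0xF + 0x2 wraps to 0x1, and 0x1 < 0xF signals
   the carry that is folded into the high-word sum h1 + h2 + 1.  */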
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
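/* An illustration (not part of the original file): this is ordinary
   schoolbook multiplication in base 2^(HOST_BITS_PER_WIDE_INT / 2).
   Each digit product arg1[i] * arg2[j] is accumulated into prod[i + j],
   and because every digit is below BASE the running carry never
   overflows an unsigned HOST_WIDE_INT, as the bounds quoted in the
   inner loop's comments show.  */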
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
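/* An illustration (not part of the original file): for shifts of less
   than a word, h1's low COUNT bits move into the top of *lv.  The
   double shift "<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1" is used
   instead of "<< (HOST_BITS_PER_WIDE_INT - count)" so that count == 0
   never produces a shift by the full word width, which C leaves
   undefined.  */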
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      break;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
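/* An illustration (not part of the original file): dividing -7 by 2
   under the different rounding codes gives

     TRUNC_DIV_EXPR:  quo = -3, rem = -1   (round toward zero)
     FLOOR_DIV_EXPR:  quo = -4, rem =  1   (round toward -infinity)
     CEIL_DIV_EXPR:   quo = -3, rem = -1   (round toward +infinity)
     ROUND_DIV_EXPR:  quo = -4, rem =  1   (round to nearest; -3.5 -> -4)

   and in every case num == quo * den + rem holds, which is what the
   "true remainder" computation above re-establishes.  */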
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
		       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
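/* An illustration (not part of the original file): for the constants 12
   and 4 this returns the tree for 3, while for 12 and 5 the nonzero
   remainder makes it return NULL_TREE, so callers fold only exact
   divisions.  */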
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
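/* Typical usage pattern (an illustration, not code from this file):
   callers bracket a fold with defer/undefer so a strict-overflow
   warning is emitted only when the folded result is actually kept.

     fold_defer_overflow_warnings ();
     t = fold (expr);
     fold_undefer_overflow_warnings (t != NULL_TREE, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   Here stmt stands for whatever statement the caller wants the
   warning attached to.  */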
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
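/* An illustration (not part of the original file): sin is odd, so the
   negation -sin(x) may be folded to sin(-x); cos is even and therefore
   absent from the list above.  The rint family is treated as odd only
   when -frounding-math is off, since under a directed run-time
   rounding mode rint(-x) need not equal -rint(x), hence the
   !flag_rounding_math guard.  */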
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
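/* An illustration (not part of the original file): in a 32-bit signed
   type the single constant that cannot be negated is 0x80000000
   (INT_MIN), whose magnitude equals 1 << (prec - 1); the final
   comparison above rejects exactly that bit pattern.  */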
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2 (COMPLEX_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)),
			    fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1 (CONJ_EXPR, type,
			    fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr (fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (tem == NULL_TREE)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
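/* An illustration (not part of the original file): splitting IN = a - 4
   with CODE == PLUS_EXPR returns the variable part a, sets *minus_litp
   to 4 and leaves *litp and *conp null; the caller can later rebuild
   the expression with associate_trees below.  */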
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
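/* An illustration (not part of the original file): folding the constant
   expression 2 + 3 reaches the PLUS_EXPR arm above, computes the
   double-word sum with add_double, and hands the result to
   force_fit_type_double so that any wraparound in the destination type
   sets TREE_OVERFLOW on the returned INTEGER_CST.  */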
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
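/* An illustration (not part of the original file): size_binop
   (PLUS_EXPR, size_int (8), size_zero_node) returns the first operand
   unchanged via the fast path above, while two nonzero size constants
   fall through to int_const_binop.  */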
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with same signedness
				and the precision is unchanged.
				??? sizetype is always sign-extended,
				but its signedness depends on the
				frontend.  Thus we see spurious overflows
				here if we do not check this.  */
			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
				   == TYPE_PRECISION (type))
				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
				      == TYPE_UNSIGNED (type))
				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
				      || (TREE_CODE (type) == INTEGER_TYPE
					  && TYPE_IS_SIZETYPE (type)))),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
				  const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1));
  return t;
}
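
/* For example (illustrative only): folding (int) 1.0e30 on a target with
   32-bit int saturates to INT_MAX, (int) -1.0e30 saturates to INT_MIN,
   and (int) __builtin_nan ("") becomes 0; in each case the resulting
   constant has TREE_OVERFLOW set so callers can diagnose the
   conversion.  */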
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      lshift_double (temp.low, temp.high,
		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      lshift_double (temp.low, temp.high,
		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
		     &temp_trunc.low, &temp_trunc.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp.low = 0;
      temp.high = 0;
      temp_trunc.low = 0;
      temp_trunc.high = 0;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits were dropped by the truncation, add 1 to
     TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    {
      double_int one;
      one.low = 1;
      one.high = 0;
      temp = double_int_add (temp, one);
    }

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
			     (temp.high < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    {
      TREE_OVERFLOW (t) = 1;
      TREE_CONSTANT_OVERFLOW (t) = 1;
    }
  else if (TREE_CONSTANT_OVERFLOW (arg1))
    TREE_CONSTANT_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
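
/* For example (illustrative only): fold_convert_const (NOP_EXPR,
   char_type_node, cst) where CST is the integer constant 300 returns
   a char constant truncated to 44 with TREE_OVERFLOW set, while an
   unhandled combination simply returns NULL_TREE and leaves the
   conversion to the caller.  */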
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    default:
      return TREE_CODE (type) == TREE_CODE (orig);
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1 (FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
	return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}
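
/* For example (illustrative only): fold_convert (double_type_node,
   integer_one_node) yields the REAL_CST 1.0 via fold_convert_const,
   while converting an int-typed expression I to a complex type builds
   COMPLEX_EXPR <(float) I, 0.0> by converting the real part and
   padding the imaginary part with zero.  */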
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
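
/* For example, inverting "x < y" yields "x >= y" when the operands
   cannot be NaN, but UNGE_EXPR when NaNs are honored: !(x < y) must
   remain true when x or y is NaN.  With both NaNs and trapping math
   in effect no inversion is safe, hence ERROR_MARK.  */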
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
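
/* For example, for "x < y && x == y" the AND of COMPCODE_LT (1) and
   COMPCODE_EQ (2) is 0, i.e. COMPCODE_FALSE, so the whole expression
   folds to a constant false node; "x < y || x == y" ORs them to
   COMPCODE_LE (3) and folds to the single comparison x <= y.  */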
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
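
/* For example, operand_equal_p considers "a + b" equal to "b + a"
   because PLUS_EXPR is commutative, but it never considers "f () + 1"
   equal to another "f () + 1": the operands have side effects, and the
   two calls may yield different values.  */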
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
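
/* For example, when fold rewrites "f () * 0" the result 0 must still
   evaluate the call, so omit_one_operand produces
   COMPOUND_EXPR <f (), 0>; omit_two_operands does the same when a fold
   discards two operands at once, chaining both behind the result so
   any side effects are preserved in order.  */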
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code,
				     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2 (code, type,
		     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
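
/* For example, negating "a < b && c" with integer operands produces
   "a >= b || !c": the comparison is inverted directly and the negation
   is distributed by De Morgan's rule.  For a float comparison under
   trapping math the function instead returns NULL_TREE and the caller
   wraps the whole expression in TRUTH_NOT_EXPR.  */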
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
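
/* For example, (x | 4) & (x | 1) is rebuilt here as x | (4 & 1);
   constant folding then reduces the inner operation to 0 and the whole
   expression to x, replacing three bit operations with none.  */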
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
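
/* For example, under unsafe math optimizations

     a / 5.0 + b / 5.0   becomes   (a + b) / 5.0
     a / 2.0 - a / 4.0   becomes   a * 0.25

   since 1/2.0 - 1/4.0 folds to the constant 0.25.  */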
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
		    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
		   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2 (code, compare_type,
			fold_build2 (BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (linner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask),
			fold_build2 (BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (rinner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
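
/* For example, for a 32-bit int EXP the constant 0x80000000 satisfies
   sign_bit_p, so fold can turn "(x & 0x80000000) != 0" into the
   cheaper sign test "x < 0".  */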
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, a comparison chain like

	X == 2 || X == 3 || X == 4 || X == 5

   is converted to

	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
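/* Illustrative sketch (not part of GCC): the SGN encoding used above,
   applied to plain ints.  A missing bound stands for -infinity (lower)
   or +infinity (upper), so for example two missing upper bounds compare
   equal.  Names are hypothetical.  */

static int
bound_lt_example (int have0, int val0, int upper0_p,
		  int have1, int val1, int upper1_p)
{
  /* 0 = finite, -1 = -infinity, 1 = +infinity.  */
  int sgn0 = have0 ? 0 : (upper0_p ? 1 : -1);
  int sgn1 = have1 ? 0 : (upper1_p ? 1 : -1);

  if (have0 && have1)
    return val0 < val1;		/* Both finite: ordinary LT_EXPR.  */
  return sgn0 < sgn1;		/* Otherwise order by infinity class.  */
}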
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
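/* Illustrative sketch (not part of GCC): the (IN_P, LOW, HIGH) triple
   computed by make_range, modelled with plain ints.  For "x > 5" the
   range is "- [-, 5]": outside the range from the lowest value to 5.
   Checking membership by hand shows the encoding round-trips.  Names
   are hypothetical.  */

struct range_example
{
  int in_p;			/* Nonzero: value must be inside.  */
  int have_low, low;		/* have_low == 0: bound is -infinity.  */
  int have_high, high;		/* have_high == 0: bound is +infinity.  */
};

static int
range_contains_example (const struct range_example *r, int x)
{
  int inside = (! r->have_low || x >= r->low)
	       && (! r->have_high || x <= r->high);
  return r->in_p ? inside : ! inside;
}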
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = signed_type_for (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert (sizetype, low);
	  low = fold_build1 (NEGATE_EXPR, sizetype, low);
	  return build_range_check (type,
				    fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
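/* Illustrative sketch (not part of GCC): the rewrite build_range_check
   performs, for unsigned ints.  Under wrap-around arithmetic a
   two-sided bounds test becomes a single comparison against the width
   of the range.  Assumes low <= high; the name is hypothetical.  */

static int
bounds_check_example (unsigned c, unsigned low, unsigned high)
{
  /* c >= low && c <= high: if c < low, c - low wraps to a huge value
     and the single test fails, as required.  */
  return c - low <= high - low;
}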
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
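/* Illustrative sketch (not part of GCC): the "infinite case" handled
   above, modelled with plain ints.  The maximum value of the type has
   no successor, which the tree routines report as a null bound; here an
   output flag plays that role.  Names are hypothetical.  */

static int
int_successor_example (int val, int type_max, int *infinite)
{
  *infinite = (val == type_max);	/* No successor: +infinity.  */
  return *infinite ? 0 : val + 1;
}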
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
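/* Illustrative sketch (not part of GCC): the in0_p && in1_p case of
   merge_ranges over plain bounded ints.  Two "inside" ranges merge to
   their intersection; an empty intersection means the combined test is
   always false.  Names are hypothetical.  */

static int
merge_inside_ranges_example (int low0, int high0, int low1, int high1,
			     int *low, int *high)
{
  *low = low0 > low1 ? low0 : low1;
  *high = high0 < high1 ? high0 : high1;
  return *low <= *high;		/* Zero: no value satisfies both.  */
}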
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type,
						   fold_convert (type, arg1),
						   arg2));
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
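/* Illustrative sketch (not part of GCC): the "A >= 0 ? A : -A" row of
   the table above, written out for ints, where there are no signed
   zeros so the caveat in the comment does not apply.  The name is
   hypothetical.  */

static int
cond_to_abs_example (int a)
{
  /* Folded to an ABS_EXPR; undefined for the most negative int, just
     like abs.  */
  return a >= 0 ? a : -a;
}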
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue (tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2 (code == TRUTH_ANDIF_EXPR
			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			     type, lhs, rhs);
	    }
	}
    }

  return 0;
}
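/* Illustrative sketch (not part of GCC): why the non-short-circuit
   rewrite can win.  When both operands are simple and cannot trap,
   evaluating the right-hand side unconditionally replaces a conditional
   branch with straight-line code.  Names are hypothetical.  */

static int
short_circuit_example (int a, int b)
{
  return a > 0 && b > 0;	/* May branch after testing a.  */
}

static int
non_short_circuit_example (int a, int b)
{
  return (a > 0) & (b > 0);	/* Same result, no branch.  */
}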
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
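/* Illustrative sketch (not part of GCC): the shift dance used by
   unextend, on a plain int.  Isolate the sign bit of a P-bit field,
   move it to the top bit, then arithmetic-shift it back so it smears
   across bits P-1 and up.  Assumes a 32-bit int with arithmetic right
   shift; the name is hypothetical.  */

static int
smear_sign_bit_example (int c, int p)
{
  int sign = (c >> (p - 1)) & 1;
  return (int) ((unsigned) sign << 31) >> (31 - (p - 1));
}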
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
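/* Illustrative sketch (not part of GCC): the effect of the merge above
   on two adjacent bitfields.  Two equality tests become one load, one
   mask, and one compare.  The packed little-endian layout is an
   assumption; types and names are hypothetical.  */

struct two_fields_example
{
  unsigned a : 4;
  unsigned b : 4;
};

static int
compare_fields_separately (const struct two_fields_example *p)
{
  return p->a == 2 && p->b == 4;	/* Two tests as written.  */
}

static int
compare_fields_merged (const unsigned char *raw)
{
  /* Both fields live in one byte: a == 2 in the low nibble and b == 4
     in the high nibble is the single constant 0x42.  */
  return *raw == 0x42;
}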
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
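/* Illustrative sketch (not part of GCC): the "MAX (X, 0) > 5 -> X > 5"
   row from the comments above, checked by hand over ints.  The name is
   hypothetical.  */

static int
max_gt_example (int x)
{
  int max_x_0 = x > 0 ? x : 0;
  /* If x <= 0, both 0 > 5 and x > 5 are false; if x > 0 the MAX is
     just x.  So the comparison folds to plain x > 5.  */
  return max_x_0 > 5;
}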
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return values depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
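/* Illustrative sketch (not part of GCC): the example from the comment
   above, written out for ints.  Dividing (X * 8) + (Y * 16) by 4
   distributes over both terms because each multiplier is a multiple of
   the divisor; this relies on the original sum not overflowing (for
   signed ints, overflow is undefined anyway).  The name is
   hypothetical.  */

static int
extract_muldiv_example (int x, int y)
{
  return x * 2 + y * 4;		/* Same value as (x * 8 + y * 16) / 4.  */
}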
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR: case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR: case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR: case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6301 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6302 && code
!= MULT_EXPR
)))
6304 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
6306 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6307 *strict_overflow_p
= true;
6308 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6309 fold_convert (ctype
,
6310 const_binop (TRUNC_DIV_EXPR
,
6313 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
6315 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6316 *strict_overflow_p
= true;
6317 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6318 fold_convert (ctype
,
6319 const_binop (TRUNC_DIV_EXPR
,
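
/* Illustrative example (not from the GCC sources) of the identities
   extract_muldiv justifies at the source level.  For a positive
   constant divisor, C truncating division is monotone, so for any
   ints a and b:

     MIN (a, b) / 5  ==  MIN (a / 5, b / 5)

   and when the outer constant divides the inner one exactly, assuming
   signed overflow is undefined (the assumption the strict_overflow_p
   machinery above tracks):

     (x * 8) / 4  ->  x * 2
     (x * 8) % 4  ->  0
*/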
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2 (code, type, true_value, arg);
      else
        lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2 (code, type, false_value, arg);
      else
        rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
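
/* Illustrative example (not from the GCC sources) of the rewrite
   performed above, with a constant ARG pushed into each arm:

     5 + (b ? x : y)   ->   b ? (5 + x) : (5 + y)
     5 + (x < y)       ->   (x < y) ? 6 : 5

   The second line follows because a comparison evaluates to 1 or 0,
   so folding the addition into each arm yields constants.  */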
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
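
/* Worked example (not from the GCC sources) of why signed zeros block
   the fold.  Under IEEE 754 with round-to-nearest:

     -0.0 + 0.0  ==  +0.0     so  x + 0.0  is not always  x
     -0.0 - 0.0  ==  -0.0     so  x - 0.0  is safe here

   and under rounding towards -infinity the subtraction case breaks
   instead, because  0.0 - 0.0  then yields  -0.0.  */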
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
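
/* Worked example (not from the GCC sources): with a non-negative
   constant and NaNs ignored, squaring both sides preserves the
   comparison because sqrt is monotone on [0, +Inf):

     sqrt(x) > 3.0   <->   x > 9.0
     sqrt(x) < 3.0   <->   x >= 0.0 && x < 9.0

   The extra  x >= 0.0  in the second line guards against negative x,
   for which sqrt(x) is NaN and every ordered comparison is false.  */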
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
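
/* Worked example (not from the GCC sources): since +Inf compares
   greater than every finite double, comparisons against it collapse
   to comparisons against DBL_MAX, e.g. for a double x:

     x <  HUGE_VAL   <->   x <= DBL_MAX
     x >= HUGE_VAL   <->   x >  DBL_MAX

   which is what the LT_EXPR and EQ_EXPR/GE_EXPR cases above build.  */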
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
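
/* Worked example (not from the GCC sources): C division truncates
   towards zero, so for int x the quotient x / 35 equals 2 exactly
   when 70 <= x && x <= 104.  The folder therefore rewrites

     x / 35 == 2   ->   a range check of x against [70, 104]
     x / 35 >  2   ->   x > 104

   with the bounds computed above as prod = 70 and prod + (35 - 1).  */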
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
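
/* Worked example (not from the GCC sources): testing one bit can be
   done with a shift instead of a compare.  For an unsigned int a:

     (a & 8) != 0   ->   (a >> 3) & 1
     (a & 8) == 0   ->   ((a >> 3) ^ 1) & 1

   where 3 is log2(8); the second line matches the XOR-with-one path
   taken above for EQ_EXPR.  */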
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
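
/* Worked example (not from the GCC sources), assuming an 8-bit signed
   char c (range -128..127) widened for the comparison: constants
   outside the narrow range decide the result outright, while in-range
   constants let the comparison be done in the narrower type:

     (int) c == 300   ->   0
     (int) c <  300   ->   1
     (int) c == 100   ->   c == 100
*/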
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
                                  TREE_INT_CST_HIGH (arg1), 0,
                                  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat op1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !INTEGRAL_TYPE_P (itype)
                  || !TYPE_MAX_VALUE (itype)
                  || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
                continue;

              tmp = fold_binary (PLUS_EXPR, itype,
                                 fold_convert (itype,
                                               TREE_OPERAND (ref, 1)),
                                 fold_convert (itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
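
/* Illustrative example (not from the GCC sources): for int a[10], the
   address arithmetic

     &a[1] p+ 2 * sizeof (int)

   steps over exactly two elements, so it can be rewritten as &a[3];
   the loop above verifies that the scale matches the array's element
   size (and that a multi-dimensional index stays in bounds) before
   rewriting the index.  */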
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert (ssizetype, a);
      tree ta1 = fold_convert (ssizetype, a1);
      diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary (MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = alt0;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
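
/* Illustrative example (not from the GCC sources) of the shapes
   handled above, for ints a, b, x:

     a * 3 + b * 3    ->   (a + b) * 3       (identical multiplicands)
     x * 7 + x        ->   x * 8             (x treated as x * 1)
     x * 12 - x * 4   ->   (x * 3 - x) * 4   (common power-of-two factor)

   The last rewrite helps multi-dimensional array indexing, where the
   strides share a power-of-two factor without being equal.  */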
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = 32 / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

static int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = 32 / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
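
/* Illustrative example (not from the GCC sources): the encode /
   interpret pair above performs at compile time the same
   reinterpretation a programmer writes with memcpy.  A minimal
   host-side sketch, assuming 4-byte float and int:

     float f = 1.0f;
     unsigned char buf[4];
     int i;
     memcpy (buf, &f, 4);   // analogue of native_encode_expr
     memcpy (&i, buf, 4);   // analogue of native_interpret_expr
     // i now holds 0x3f800000, the IEEE-754 bit pattern of 1.0f
*/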
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  When IN_FOLD is true
   avoid modifications of T.  */

static tree
build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else if (!in_fold)
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }
  else
    t = build1 (ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T with type PTRTYPE.  This
   function modifies the input parameter 'T' by sometimes setting the
   TREE_ADDRESSABLE flag.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
}

/* Build an expression for the address of T.  This function modifies
   the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
   flag.  When called from fold functions, use fold_addr_expr instead.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type_1 (t,
                                           build_pointer_type (TREE_TYPE (t)),
                                           false);
}

/* Same as build_fold_addr_expr, builds an expression for the address
   of T, but avoids touching the input node 't'.  Fold functions
   should use this version.  */

static tree
fold_addr_expr (tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
}
7921 /* Fold a unary expression of code CODE and type TYPE with operand
7922 OP0. Return the folded expression if folding is successful.
7923 Otherwise, return NULL_TREE. */
7926 fold_unary (enum tree_code code
, tree type
, tree op0
)
7930 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7932 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7933 && TREE_CODE_LENGTH (code
) == 1);
7938 if (code
== NOP_EXPR
|| code
== CONVERT_EXPR
7939 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
7941 /* Don't use STRIP_NOPS, because signedness of argument type
7943 STRIP_SIGN_NOPS (arg0
);
7947 /* Strip any conversions that don't change the mode. This
7948 is safe for every expression, except for a comparison
7949 expression because its signedness is derived from its
7952 Note that this is done as an internal manipulation within
7953 the constant folder, in order to find the simplest
7954 representation of the arguments so that their form can be
7955 studied. In any cases, the appropriate type conversions
7956 should be put back in the tree that will get out of the
7962 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7964 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7965 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7966 fold_build1 (code
, type
, TREE_OPERAND (arg0
, 1)));
7967 else if (TREE_CODE (arg0
) == COND_EXPR
)
7969 tree arg01
= TREE_OPERAND (arg0
, 1);
7970 tree arg02
= TREE_OPERAND (arg0
, 2);
7971 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7972 arg01
= fold_build1 (code
, type
, arg01
);
7973 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7974 arg02
= fold_build1 (code
, type
, arg02
);
7975 tem
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7978 /* If this was a conversion, and all we did was to move into
7979 inside the COND_EXPR, bring it back out. But leave it if
7980 it is a conversion from integer to integer and the
7981 result precision is no wider than a word since such a
7982 conversion is cheap and may be optimized away by combine,
7983 while it couldn't if it were outside the COND_EXPR. Then return
7984 so we don't get into an infinite recursion loop taking the
7985 conversion out and then back in. */
7987 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
7988 || code
== NON_LVALUE_EXPR
)
7989 && TREE_CODE (tem
) == COND_EXPR
7990 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7991 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7992 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7993 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7994 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7995 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7996 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7998 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7999 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
8000 || flag_syntax_only
))
8001 tem
= build1 (code
, type
,
8003 TREE_TYPE (TREE_OPERAND
8004 (TREE_OPERAND (tem
, 1), 0)),
8005 TREE_OPERAND (tem
, 0),
8006 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
8007 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
8010 else if (COMPARISON_CLASS_P (arg0
))
8012 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8014 arg0
= copy_node (arg0
);
8015 TREE_TYPE (arg0
) = type
;
8018 else if (TREE_CODE (type
) != INTEGER_TYPE
)
8019 return fold_build3 (COND_EXPR
, type
, arg0
,
8020 fold_build1 (code
, type
,
8022 fold_build1 (code
, type
,
8023 integer_zero_node
));
8032 case FIX_TRUNC_EXPR
:
8033 if (TREE_TYPE (op0
) == type
)
8036 /* If we have (type) (a CMP b) and type is an integral type, return
8037 new expression involving the new type. */
8038 if (COMPARISON_CLASS_P (op0
) && INTEGRAL_TYPE_P (type
))
8039 return fold_build2 (TREE_CODE (op0
), type
, TREE_OPERAND (op0
, 0),
8040 TREE_OPERAND (op0
, 1));
8042 /* Handle cases of two conversions in a row. */
8043 if (TREE_CODE (op0
) == NOP_EXPR
8044 || TREE_CODE (op0
) == CONVERT_EXPR
)
8046 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
8047 tree inter_type
= TREE_TYPE (op0
);
8048 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
8049 int inside_ptr
= POINTER_TYPE_P (inside_type
);
8050 int inside_float
= FLOAT_TYPE_P (inside_type
);
8051 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
8052 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
8053 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
8054 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
8055 int inter_ptr
= POINTER_TYPE_P (inter_type
);
8056 int inter_float
= FLOAT_TYPE_P (inter_type
);
8057 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
8058 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
8059 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
8060 int final_int
= INTEGRAL_TYPE_P (type
);
8061 int final_ptr
= POINTER_TYPE_P (type
);
8062 int final_float
= FLOAT_TYPE_P (type
);
8063 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
8064 unsigned int final_prec
= TYPE_PRECISION (type
);
8065 int final_unsignedp
= TYPE_UNSIGNED (type
);
8067 /* In addition to the cases of two conversions in a row
8068 handled below, if we are converting something to its own
8069 type via an object of identical or wider precision, neither
8070 conversion is needed. */
8071 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
8072 && (((inter_int
|| inter_ptr
) && final_int
)
8073 || (inter_float
&& final_float
))
8074 && inter_prec
>= final_prec
)
8075 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8077 /* Likewise, if the intermediate and final types are either both
8078 float or both integer, we don't need the middle conversion if
8079 it is wider than the final type and doesn't change the signedness
8080 (for integers). Avoid this if the final type is a pointer
8081 since then we sometimes need the inner conversion. Likewise if
8082 the outer has a precision not equal to the size of its mode. */
8083 if (((inter_int
&& inside_int
)
8084 || (inter_float
&& inside_float
)
8085 || (inter_vec
&& inside_vec
))
8086 && inter_prec
>= inside_prec
8087 && (inter_float
|| inter_vec
8088 || inter_unsignedp
== inside_unsignedp
)
8089 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
8090 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
8092 && (! final_vec
|| inter_prec
== inside_prec
))
8093 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8095 /* If we have a sign-extension of a zero-extended value, we can
8096 replace that by a single zero-extension. */
8097 if (inside_int
&& inter_int
&& final_int
8098 && inside_prec
< inter_prec
&& inter_prec
< final_prec
8099 && inside_unsignedp
&& !inter_unsignedp
)
8100 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8102 /* Two conversions in a row are not needed unless:
8103 - some conversion is floating-point (overstrict for now), or
8104 - some conversion is a vector (overstrict for now), or
8105 - the intermediate type is narrower than both initial and
8107 - the intermediate type and innermost type differ in signedness,
8108 and the outermost type is wider than the intermediate, or
8109 - the initial type is a pointer type and the precisions of the
8110 intermediate and final types differ, or
8111 - the final type is a pointer type and the precisions of the
8112 initial and intermediate types differ.
8113 - the initial type is a pointer to an array and the final type
8115 if (! inside_float
&& ! inter_float
&& ! final_float
8116 && ! inside_vec
&& ! inter_vec
&& ! final_vec
8117 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
8118 && ! (inside_int
&& inter_int
8119 && inter_unsignedp
!= inside_unsignedp
8120 && inter_prec
< final_prec
)
8121 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
8122 == (final_unsignedp
&& final_prec
> inter_prec
))
8123 && ! (inside_ptr
&& inter_prec
!= final_prec
)
8124 && ! (final_ptr
&& inside_prec
!= inter_prec
)
8125 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
8126 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
8127 && ! (inside_ptr
&& final_ptr
8128 && TREE_CODE (TREE_TYPE (inside_type
)) == ARRAY_TYPE
8129 && TREE_CODE (TREE_TYPE (type
)) != ARRAY_TYPE
))
8130 return fold_build1 (code
, type
, TREE_OPERAND (op0
, 0));
8133 /* Handle (T *)&A.B.C for A being of type T and B and C
8134 living at offset zero. This occurs frequently in
8135 C++ upcasting and then accessing the base. */
8136 if (TREE_CODE (op0
) == ADDR_EXPR
8137 && POINTER_TYPE_P (type
)
8138 && handled_component_p (TREE_OPERAND (op0
, 0)))
8140 HOST_WIDE_INT bitsize
, bitpos
;
8142 enum machine_mode mode
;
8143 int unsignedp
, volatilep
;
8144 tree base
= TREE_OPERAND (op0
, 0);
8145 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
8146 &mode
, &unsignedp
, &volatilep
, false);
8147 /* If the reference was to a (constant) zero offset, we can use
8148 the address of the base if it has the same base type
8149 as the result type. */
8150 if (! offset
&& bitpos
== 0
8151 && TYPE_MAIN_VARIANT (TREE_TYPE (type
))
8152 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
8153 return fold_convert (type
, fold_addr_expr (base
));
8156 if ((TREE_CODE (op0
) == MODIFY_EXPR
8157 || TREE_CODE (op0
) == GIMPLE_MODIFY_STMT
)
8158 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0
, 1))
8159 /* Detect assigning a bitfield. */
8160 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8162 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0
, 0), 1))))
8164 /* Don't leave an assignment inside a conversion
8165 unless assigning a bitfield. */
8166 tem
= fold_build1 (code
, type
, GENERIC_TREE_OPERAND (op0
, 1));
8167 /* First do the assignment, then return converted constant. */
8168 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8169 TREE_NO_WARNING (tem
) = 1;
8170 TREE_USED (tem
) = 1;
8174 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8175 constants (if x has signed type, the sign bit cannot be set
8176 in c). This folds extension into the BIT_AND_EXPR. */
8177 if (INTEGRAL_TYPE_P (type
)
8178 && TREE_CODE (type
) != BOOLEAN_TYPE
8179 && TREE_CODE (op0
) == BIT_AND_EXPR
8180 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
8183 tree and0
= TREE_OPERAND (and, 0), and1
= TREE_OPERAND (and, 1);
8186 if (TYPE_UNSIGNED (TREE_TYPE (and))
8187 || (TYPE_PRECISION (type
)
8188 <= TYPE_PRECISION (TREE_TYPE (and))))
8190 else if (TYPE_PRECISION (TREE_TYPE (and1
))
8191 <= HOST_BITS_PER_WIDE_INT
8192 && host_integerp (and1
, 1))
8194 unsigned HOST_WIDE_INT cst
;
8196 cst
= tree_low_cst (and1
, 1);
8197 cst
&= (HOST_WIDE_INT
) -1
8198 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
8199 change
= (cst
== 0);
8200 #ifdef LOAD_EXTEND_OP
8202 && !flag_syntax_only
8203 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
8206 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
8207 and0
= fold_convert (uns
, and0
);
8208 and1
= fold_convert (uns
, and1
);
8214 tem
= force_fit_type_double (type
, TREE_INT_CST_LOW (and1
),
8215 TREE_INT_CST_HIGH (and1
), 0,
8216 TREE_OVERFLOW (and1
));
8217 return fold_build2 (BIT_AND_EXPR
, type
,
8218 fold_convert (type
, and0
), tem
);
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away. Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build2 (TREE_CODE (arg0), type,
			      fold_convert (type, arg00),
			      fold_convert (sizetype, arg01));
	}
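
      /* Illustrative example (added commentary): with "char *p" and a
	 result type T1 compatible with the type of p, (T1)(p p+ 4)
	 becomes (T1)p p+ 4, and the inner cast then folds away
	 entirely.  */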
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
	}
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (op0) == NOP_EXPR
	      && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      && TYPE_PRECISION (TREE_TYPE (op0))
		 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
	return fold_convert (type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold_build1 (ABS_EXPR,
						    TREE_TYPE (targ0),
						    targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
	}
      return NULL_TREE;
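
      /* Illustrative examples (added commentary): fabs ((double) f)
	 with "float f" becomes (double) fabsf (f); fabs (fabs (x))
	 collapses to fabs (x); and fabs (x) disappears entirely when
	 x is known to be non-negative.  */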
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
	  return fold_build2 (COMPLEX_EXPR, type, rpart,
			      negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert (type, TREE_OPERAND (arg0, 0));

      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (op0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1 (NEGATE_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 0)))))
	return fold_build2 (BIT_XOR_EXPR, type, tem,
			    fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 1)))))
	return fold_build2 (BIT_XOR_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;
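
      /* Worked example (added commentary, not from the original
	 sources): in two's complement ~X == -X - 1, so ~(-A) == A - 1
	 and ~(A - 1) == -A; e.g. for A == 5, ~(-5) == 4 == 5 - 1,
	 matching the transforms above.  */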
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert (type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
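
/* Worked example (added commentary, not from the original sources):
   in MIN (MAX (a, b), b) the inner MAX is at least b, so the outer
   MIN always yields b; the discarded operand "a" is kept only for
   its side effects via omit_one_operand.  */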
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
	  || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2 (code, type, t, arg1);
}
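
/* Worked example (added commentary): "4 <= x" is canonicalized to
   "3 < x" (emitted in swapped form as "x > 3"), and "x - 5 < y"
   becomes "x - 4 <= y"; the latter is flagged through
   *strict_overflow_p because it is only valid when signed overflow
   is undefined.  */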
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* In principle pointers also have undefined overflow behavior,
     but that causes problems elsewhere.  */
  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      || POINTER_TYPE_P (TREE_TYPE (arg0)))
    return NULL_TREE;

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand (type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand (type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type, variable, lhs);
	}
    }
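
  /* Worked example (added commentary): for 32-bit int, "x + 10 < 5"
     becomes "x < -5".  When the adjusted constant overflows, e.g. in
     "x - 1 > INT_MAX", the canonicalized form above decides the
     result outright and the comparison folds to a constant, assuming
     signed overflow is undefined.  */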
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  /* We have to make sure to have an indirect/non-indirect base1
	     just the same as we did for base0.  */
	  if (TREE_CODE (base1) == INDIRECT_REF
	      && indirect_base0)
	    base1 = TREE_OPERAND (base1, 0);
	  else if (!indirect_base0)
	    base1 = NULL_TREE;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}
      else if (indirect_base0)
	base1 = NULL_TREE;

      /* If we have equivalent bases we might be able to simplify.  */
      if (base0 && base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if (offset0 == offset1
	      || (offset0 && offset1
		  && operand_equal_p (offset0, offset1, 0)))
	    {
	      switch (code)
		{
		case EQ_EXPR:
		  return build_int_cst (boolean_type_node,
					bitpos0 == bitpos1);
		case NE_EXPR:
		  return build_int_cst (boolean_type_node,
					bitpos0 != bitpos1);
		case LT_EXPR:
		  return build_int_cst (boolean_type_node,
					bitpos0 < bitpos1);
		case LE_EXPR:
		  return build_int_cst (boolean_type_node,
					bitpos0 <= bitpos1);
		case GE_EXPR:
		  return build_int_cst (boolean_type_node,
					bitpos0 >= bitpos1);
		case GT_EXPR:
		  return build_int_cst (boolean_type_node,
					bitpos0 > bitpos1);
		default:
		  break;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1)
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert (signed_size_type_node, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert (signed_size_type_node, offset1);

	      return fold_build2 (code, type, offset0, offset1);
	    }
	}
    }
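
  /* Illustrative example (added commentary): "&a[2] < &a[5]" has a
     common base and constant byte positions, so it folds to the
     constant 1; "(p p+ i) == (p p+ j)" with equal constant parts
     reduces to comparing the variable offsets i and j after
     conversion to the signed variant of sizetype.  */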
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      variable1,
			      fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
					   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2 (code, type,
			      fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
					   variable1, cst),
			      variable2);
	}
    }
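
  /* Worked example (added commentary): "x + 100 < y + 101" becomes
     "x < y + 1": the combined constant 101 - 100 == 1 is smaller in
     absolute value than either original constant and does not
     overflow, so the constants are merged on one side under the
     stated overflow assumption.  */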
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      gcc_assert (!integer_zerop (const1));

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }
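
  /* Worked example (added commentary): "x * 4 < 0" becomes "x < 0",
     and "x * -2 < 0" becomes "x > 0" because the negative multiplier
     swaps the comparison; both rely on signed multiplication not
     wrapping, hence the strict-overflow warning above.  */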
  tem = maybe_canonicalize_comparison (code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
	  || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  break;
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }
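
  /* Worked example (added commentary): for ((x > y) - (y > x)) > 0
     the three substitutions evaluate to high_result == 1,
     equal_result == 0 and low_result == 0, giving mask 4 and hence
     the simplified comparison x > y.  */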
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	{
	  tree op0 = TREE_OPERAND (cref0, 0);
	  tree op1 = TREE_OPERAND (cref1, 0);
	  return fold_build2 (code, type,
			      fold_addr_expr (op0),
			      fold_addr_expr (op1));
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (code, type,
			  fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
			  TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2 (swap_tree_comparison (code), type,
			  TREE_OPERAND (arg0, 0),
			  fold_build1 (BIT_NOT_EXPR, cmp_type,
				       fold_convert (cmp_type, arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
		      fold_convert (itype, integer_zero_node));
}
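
/* Worked equation (added commentary, not from the original sources):
   for z = a + b*i we have conj(z) = a - b*i, hence
   z * conj(z) = a*a + b*b + 0*i, which is exactly the COMPLEX_EXPR
   built above; the save_exprs keep the real and imaginary parts from
   being evaluated twice.  */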
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
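
/* Worked example (added commentary, not from the original sources):
   for "char buf[64] __attribute__((aligned (8)));" the address
   &buf[4] yields modulus 8 and residue 4; for "p p+ i * 12" with p
   of modulus 8, the greatest power-of-2 divisor of 12 is 4, so the
   result has modulus MIN (8, 4) == 4.  */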
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
	       || IS_GIMPLE_STMT_CODE_CLASS (kind))
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1, 0);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type,
				    fold_convert (TREE_TYPE (op0),
						  TREE_OPERAND (arg0, 1)),
				    op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type, op0,
				    fold_convert (TREE_TYPE (op1),
						  TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
						fold_convert (sizetype, arg1),
						fold_convert (sizetype, arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2 (POINTER_PLUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2 (PLUS_EXPR, sizetype,
			       arg01, fold_convert (sizetype, arg1));
	  return fold_convert (type,
			       fold_build2 (POINTER_PLUS_EXPR,
					    TREE_TYPE (arg00), arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  The loop optimizer sometimes produces this kind
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
	  if (tem)
	    return fold_convert (type, tem);
	}

      return NULL_TREE;
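
      /* Illustrative examples (added commentary): "0 p+ i" folds to
	 (type) i, "(p p+ 4) p+ 8" reassociates to "p p+ 12", and
	 "&a[i] p+ c * j" can become "&a[i + j]" when c is the element
	 size of the array.  */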
    case PLUS_EXPR:
      /* PTR + INT -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg0))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg0), arg0,
						fold_convert (sizetype, arg1)));
      /* INT + PTR -> (INT)(PTR p+ INT) */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
						TREE_TYPE (arg1), arg1,
						fold_convert (sizetype, arg0)));
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand (type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert (type,
				     fold_build2 (TRUNC_MOD_EXPR,
						  TREE_TYPE (arg0),
						  arg0, cst0));
	    }
	}
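
      /* Worked examples (added commentary, not from the original
	 sources): "~a + 1" is the two's complement negation -a;
	 "a + ~a" is all ones, i.e. -1; and "x + (x / 8) * -8" matches
	 the last pattern above and folds to "x % 8" since
	 8 + -8 == 0.  */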
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type,
							       parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
				  : build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
				  : build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				  : build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
				  : build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
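
      /* Worked example (added commentary, not from the original
	 sources): for 32-bit unsigned a, "(a << 3) + (a >> 29)"
	 satisfies 3 + 29 == 32 and becomes a left-rotate of a by 3;
	 likewise "(a << b) + (a >> (32 - b))" becomes a left-rotate
	 by b.  */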
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* With undefined overflow we can only associate constants
	     with one variable.  */
	  if ((POINTER_TYPE_P (type)
	       || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	      && var0 && var1)
	    {
	      tree tmp0 = var0;
	      tree tmp1 = var1;

	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
		tmp0 = TREE_OPERAND (tmp0, 0);
	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
		tmp1 = TREE_OPERAND (tmp1, 0);
	      /* The only case we can still associate with two variables
		 is if they are the same, modulo negation.  */
	      if (!operand_equal_p (tmp0, tmp1, 0))
		ok = false;
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1,
					    code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal
		 is greater than the positive part.  Otherwise, the
		 multiplicative folding code (i.e. extract_muldiv) may be
		 fooled in case unsigned constants are subtracted, like in
		 the following example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}
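      /* Illustration (editorial note; hypothetical values): for integer
	 operands, an expression such as (a + 3) + (b + 5) is split into the
	 variable group {a, b} and the literal group {3, 5}; reassociating
	 the groups separately lets the literals fold, yielding the
	 equivalent of (a + b) + 8.  */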
      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
	      return fold_build2 (PLUS_EXPR, type,
				  fold_build2 (MINUS_EXPR, type,
					       arg00, arg10),
				  fold_build2 (MINUS_EXPR, type,
					       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary (MINUS_EXPR, type, arg00,
				      fold_convert (type, arg1));
	      if (tmp)
		return fold_build2 (PLUS_EXPR, type, tmp, arg01);
	    }
	}
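      /* Illustration (editorial note; hypothetical user code):

	   ptrdiff_t f (char *p) { return (p + 12) - (p + 4); }

	 Both operands are POINTER_PLUS_EXPRs on the same base, so the rule
	 above rewrites the difference as (p - p) + (12 - 4), which folds
	 to 8.  */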
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (PLUS_EXPR, type, op0,
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, negate_expr (arg1)),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1 (BIT_NOT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, op1);
      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return fold_convert (type,
			     fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					  arg0, TREE_OPERAND (arg1, 1)));
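      /* Illustration (editorial note; hypothetical user code):

	   int f (int x) { return x - (x / 16) * 16; }

	 The divisor and the multiplier are the same constant and the
	 dividend matches x, so the whole expression is rewritten as
	 x % 16.  */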
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg10),
				      fold_convert (type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
		  return fold_build2 (BIT_AND_EXPR, type,
				      fold_build1 (BIT_NOT_EXPR, type, arg11),
				      fold_convert (type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1 (NEGATE_EXPR, rtype,
					 arg1r ? arg1r
					 : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1 (NEGATE_EXPR, rtype,
					 arg1i ? arg1i
					 : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2 (COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);
      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2 (PLUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2 (MULT_EXPR, type, diff,
				  fold_convert (type, esz));
	    }
	}
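      /* Illustration (editorial note; hypothetical values): for
	 &a[i] - &a[j] over the same array of int, the fold above produces
	 (i - j) scaled by the element size, i.e. the byte distance
	 (i - j) * 4.  */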
      if (flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (MULT_EXPR, type,
			    fold_convert (type, negate_expr (arg0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert (type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2 (MULT_EXPR, type,
				fold_convert (type, negate_expr (arg0)), tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, op0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2 (LSHIFT_EXPR, type, op1,
				TREE_OPERAND (arg0, 1));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert (type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
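	  /* Illustration (editorial note; hypothetical user code):

	       int f (int a, int b) { return a * (1 << b); }

	     matches the (a * (1 << b)) rule above and becomes a << b;
	     earlier in this block, a * -1 is likewise rewritten as -a.  */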
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may
	     change the result for floating point types due to rounding so
	     it is applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold_build2 (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold_build2 (MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return fold_build2 (COMPLEX_EXPR, type,
				    negate_expr (fold_build1 (IMAGPART_EXPR,
							      rtype, arg0)),
				    fold_build1 (REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return fold_build2 (COMPLEX_EXPR, type,
				    fold_build1 (IMAGPART_EXPR, rtype, arg0),
				    negate_expr (fold_build1 (REALPART_EXPR,
							      rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		  return build_call_expr (rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2 (PLUS_EXPR, type,
					  CALL_EXPR_ARG (arg0, 0),
					  CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr (expfn, 1, arg);
		}
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      return build_call_expr (powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      return build_call_expr (powfn, 2, arg00, arg);
		    }
		}
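	      /* Illustration (editorial note; hypothetical user code,
		 requires -funsafe-math-optimizations):

		   double f (double x, double y, double z)
		   { return pow (x, y) * pow (x, z); }

		 matches the second rule above and is rewritten as
		 pow (x, y + z); with a shared exponent, pow (x, y) *
		 pow (z, y) instead becomes pow (x * z, y).  */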
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr (sinfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
		}
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr (powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr (powfn, 2, arg1, arg);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr (powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg0);
	}
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
	  int width = TYPE_PRECISION (type), w;
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2 (BIT_IOR_EXPR, type,
				TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  hi1 &= mhi;
	  lo1 &= mlo;
	  hi2 &= mhi;
	  lo2 &= mlo;
	  hi3 = hi1 & ~hi2;
	  lo3 = lo1 & ~lo2;
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((lo1 | lo2) & mask) == mask
		  && (lo1 & ~mask) == 0 && hi1 == 0)
		{
		  hi3 = 0;
		  lo3 = mask;
		  break;
		}
	    }
	  if (hi3 != hi1 || lo3 != lo1)
	    return fold_build2 (BIT_IOR_EXPR, type,
				fold_build2 (BIT_AND_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     build_int_cst_wide (type,
								 lo3, hi3)),
				arg1);
	}
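      /* Illustration (editorial note; hypothetical constants): in
	 (X & 0x3) | 0x7 we have (C1 & C2) == C1, so the first test above
	 reduces the whole expression to the constant 0x7; in
	 (X & 0xff) | 0x0f the bits of C1 that C2 already provides are
	 dropped, shrinking C1 to 0xf0.  */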
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);
      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert (type, integer_zero_node);
	  t1 = fold_unary (BIT_NOT_EXPR, type, t1);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_build1 (BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2 (EQ_EXPR, type, arg0,
			    build_int_cst (TREE_TYPE (arg0), 0));
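      /* Illustration (editorial note): a test such as (x & 1) ^ 1 is
	 rewritten by the rule above as (x & 1) == 0, a direct test of the
	 low bit that later passes recognize more readily.  */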
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
	  tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 0), tmp1);
	  tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   TREE_OPERAND (arg0, 1), tmp1);
	  return fold_convert (type,
			       fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
					    tmp2, tmp3));
	}

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2 (EQ_EXPR, type,
			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
					   build_int_cst (TREE_TYPE (tem), 1)),
			      build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0),
			      fold_build1 (BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_build1 (BIT_NOT_EXPR, type, tem),
			      fold_convert (type, arg0));
	}
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      fold_convert (type,
						    TREE_OPERAND (arg0, 0)),
				      fold_convert (type,
						    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue);

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
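      /* Illustration (editorial note): if arg0 is the address of an object
	 known to be at least 4-byte aligned, then for addr & 3 the call
	 above reports a modulus of at least 4 with residue 0, and the AND
	 folds to the constant 0 without any runtime address computation.  */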
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand (type, build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2 (TREE_CODE (arg0), shift_type,
					 fold_convert (shift_type,
						       TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1));
		      tem = fold_convert (type, tem);
		    }
		  else
		    tem = op0;
		  return fold_build2 (BIT_AND_EXPR, type, tem,
				      build_int_cst_type (TREE_TYPE (op1),
							  newmask));
		}
	    }
	}

      goto associate;
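      /* Illustration (editorial note; hypothetical constants):
	 ((x << 16) & 0xff00) is always zero, since the shift clears the low
	 16 bits, so it folds to (x, 0) above.  Conversely, for
	 ((x << 8) & 0xffffff00) on a 32-bit type, newmask becomes all-ones,
	 and the widened AND is then dropped by the integer_all_onesp rule
	 earlier in this case.  */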
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands (type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands (type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
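      /* Illustration (editorial note; hypothetical user code, applies under
	 -freciprocal-math or whenever the inverse is exact):

	   double f (double x) { return x / 4.0; }

	 4.0 has the exactly representable reciprocal 0.25, so the division
	 is rewritten as x * 0.25, typically much cheaper than a divide.  */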
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr (tanfn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }
	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr (cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr (cosfn, 1, arg00);
		      return fold_build2 (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr (powfn, 2, arg1, arg);
		}
	    }
	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2 (RDIV_EXPR, type, c, b);

		  tmp = build_call_expr (rootfn, 1, tmp);
		  return fold_build2 (MULT_EXPR, type, arg0, tmp);
		}
	    }
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      arg1 = build_call_expr (powfn, 2, arg10, neg11);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
				    sh_cnt, build_int_cst (NULL_TREE, pow2));
	      return fold_build2 (RSHIFT_EXPR, type,
				  fold_convert (type, arg0), sh_cnt);
	    }
	}
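      /* Illustration (editorial note; hypothetical user code):

	   unsigned f (unsigned a, int n) { return a / (4u << n); }

	 B is 4, a power of two, so the division becomes a >> (n + 2),
	 with log2 (B) folded into the shift count.  */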
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2 (code, type,
			      fold_convert (type, TREE_OPERAND (arg0, 0)),
			      negate_expr (arg1));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2 (code, type, negate_expr (arg0),
			      TREE_OPERAND (arg1, 0));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert (type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      strict_overflow_p = false;
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
				       build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, arg0),
				  fold_convert (type, mask));
	    }
	}
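      /* Illustration (editorial note; hypothetical user code):

	   unsigned f (unsigned x) { return x % 8; }

	 8 is a power of two and x is unsigned, so the rule above rewrites
	 the modulo as x & 7; likewise x % (4u << n) becomes
	 x & ((4u << n) - 1).  */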
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulos"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert (type, tem);
	}

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return build_int_cst (type, 0);
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			      build_int_cst (type, low));
	}
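      /* Illustration (editorial note; hypothetical constants):
	 (x << 3) << 2 becomes x << 5; and if the combined count reaches the
	 type width (e.g. (x << 20) << 20 for a 32-bit unsigned type), the
	 adjustment above yields the constant 0 instead of an out-of-range
	 shift.  */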
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2 (code, type,
				   fold_convert (type, TREE_OPERAND (arg0, 1)),
				   arg1);
	  tree shift = fold_build2 (code, type,
				    fold_convert (type, TREE_OPERAND (arg0, 0)),
				    arg1);
	  tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
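      /* Illustrative example (editorial addition): for 32-bit int,
         MIN_EXPR <x, INT_MIN> folds to INT_MIN and MAX_EXPR <x, INT_MAX>
         folds to INT_MAX; omit_one_operand still arranges to evaluate the
         discarded operand in case it has side effects.  */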
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2 (code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2 (code, type, arg0, tem);
        }
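      /* Illustrative examples (editorial addition): a && 1 folds to a,
         a && 0 folds to 0 while still evaluating a for side effects, and
         !a && a folds to 0 outright.  */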
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a01,
                                fold_build2 (code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type,
                                fold_build2 (code, type, a00, a10),
                                a01);
        }
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue (arg0);
          return non_lvalue (fold_convert (type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
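      /* Illustrative examples (editorial addition): in truth terms,
         a XOR 0 folds to a, a XOR 1 folds to !a, a XOR a folds to 0,
         and !a XOR a folds to 1.  */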
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert (TREE_TYPE (arg0), arg1),
                                      TREE_OPERAND (arg0, 1), 0))
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (arg1))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
                                         fold_convert (TREE_TYPE (arg0), arg1),
                                         TREE_OPERAND (arg0, 1)));
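      /* Illustrative example (editorial addition): (x ^ 5) == 3 folds to
         x == (5 ^ 3), i.e. x == 6.  */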
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree cst = TREE_OPERAND (arg0, 1);

          if (code == EQ_EXPR
              && !integer_zerop (cst))
            return omit_two_operands (type, boolean_false_node,
                                      TREE_OPERAND (arg0, 0), arg1);
          else
            return omit_two_operands (type, boolean_true_node,
                                      TREE_OPERAND (arg0, 0), arg1);
        }
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2 (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
                                      arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                 build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2 (code, type,
                                  fold_convert (TREE_TYPE (arg1), tem), arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
                                      arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                 build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2 (code, type,
                                  fold_convert (TREE_TYPE (arg1), tem), arg1);
            }
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 0)),
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 1)));

          return fold_build2 (code, type, newmod,
                              fold_convert (newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2 (code, type, tem, arg1);
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                    arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand (type,
                                         code == EQ_EXPR ? integer_one_node
                                                         : integer_zero_node,
                                         arg000);
            }
        }
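      /* Illustrative example (editorial addition), for 32-bit int x:
         ((x >> 2) & 4) != 0 becomes (x & (4 << 2)) != 0, i.e.
         (x & 16) != 0; with a signed shift, ((x >> 31) & 2) != 0
         becomes x < 0.  */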
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert (TREE_TYPE (arg0),
                                                integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1 (BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
              return fold_build2 (code, type, iref,
                                  build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert (itype, arg00);
                }
              return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  type, arg00, build_int_cst (itype, 0));
            }
        }
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
                            build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
                                         TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                             TREE_OPERAND (arg0, 1));
          return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                              type, tem, arg1);
        }
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
                             arg000, TREE_OPERAND (arg0, 1));
          return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              tem, build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand (type, res, arg0);
        }
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (code, type,
                            TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2 (code, type,
                                fold_build2 (BIT_AND_EXPR, itype,
                                             fold_build2 (BIT_XOR_EXPR, itype,
                                                          arg00, arg10),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2 (code, type,
                                fold_build2 (BIT_AND_EXPR, itype,
                                             fold_build2 (BIT_XOR_EXPR, itype,
                                                          arg00, arg11),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2 (code, type,
                                fold_build2 (BIT_AND_EXPR, itype,
                                             fold_build2 (BIT_XOR_EXPR, itype,
                                                          arg01, arg10),
                                             arg00),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2 (code, type,
                                fold_build2 (BIT_AND_EXPR, itype,
                                             fold_build2 (BIT_XOR_EXPR, itype,
                                                          arg01, arg11),
                                             arg00),
                                build_int_cst (itype, 0));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2 (code, type, arg00, arg10);
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2 (code, type, arg00, arg11);
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2 (code, type, arg01, arg10);
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2 (code, type, arg01, arg11);

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            return fold_build2 (code, type,
                                fold_build2 (BIT_XOR_EXPR, itype, arg00,
                                             fold_build2 (BIT_XOR_EXPR, itype,
                                                          arg01, arg11)),
                                arg10);
        }
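      /* Illustrative example (editorial addition): (x ^ z) == (y ^ z)
         folds to x == y, and (x ^ 1) == (y ^ 3) folds to
         (x ^ (1 ^ 3)) == y, i.e. (x ^ 2) == y.  */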
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary (code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands (type, boolean_false_node,
                                              imag0, imag1);
                  return fold_build2 (NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands (type, boolean_true_node,
                                              imag0, imag1);
                  return fold_build2 (EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary (code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands (type, boolean_false_node,
                                              real0, real1);
                  return fold_build2 (NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands (type, boolean_true_node,
                                              real0, real1);
                  return fold_build2 (EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          if (code == GE_EXPR)
            {
              arg1 = const_binop (MINUS_EXPR, arg1,
                                  build_int_cst (TREE_TYPE (arg1), 1), 0);
              return fold_build2 (GT_EXPR, type, arg0,
                                  fold_convert (TREE_TYPE (arg0), arg1));
            }
          if (code == LT_EXPR)
            {
              arg1 = const_binop (MINUS_EXPR, arg1,
                                  build_int_cst (TREE_TYPE (arg1), 1), 0);
              return fold_build2 (LE_EXPR, type, arg0,
                                  fold_convert (TREE_TYPE (arg0), arg1));
            }
        }
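      /* Illustrative example (editorial addition): x >= 5 becomes x > 4
         and x < 5 becomes x <= 4, canonicalizing toward the forms the
         later constant-comparison cases expect.  */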
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && !TREE_OVERFLOW (arg1)
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2 (EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2 (NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2 (EQ_EXPR, type,
                                      fold_convert (TREE_TYPE (arg1), arg0),
                                      arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1), 0);
                  return fold_build2 (NE_EXPR, type,
                                      fold_convert (TREE_TYPE (arg1), arg0),
                                      arg1);
                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2 (EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2 (NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (NE_EXPR, type,
                                      fold_convert (TREE_TYPE (arg1), arg0),
                                      arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (EQ_EXPR, type,
                                      fold_convert (TREE_TYPE (arg1), arg0),
                                      arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                        type, fold_convert (st, arg0),
                                        build_int_cst (st, 0));
                  }
              }
          }
      }
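      /* Illustrative example (editorial addition): at the tree level,
         with an 8-bit unsigned type, x > 255 folds to 0, x <= 255 folds
         to 1, and x > 254 folds to x == 255.  */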
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
                            build2 (GE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), tem),
                            build2 (LE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand (type, integer_one_node, arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand (type, integer_zero_node, arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                               TREE_OPERAND (arg1, 1)),
                       build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg1) == NOP_EXPR
              || TREE_CODE (arg1) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                  fold_convert (TREE_TYPE (arg0),
                                build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                        TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                      1))),
                  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2 (code, type, fold_convert (newtype, targ0),
                              fold_convert (newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
                  int *walk_subtrees,
                  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns false if no such
   label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue (tem);
          return NULL_TREE;
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          tem = fold_truth_not_expr (arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }
      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (arg0);
          if (tem)
            return fold_build3 (code, type, tem, op2, op1);
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));
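      /* Illustrative example (editorial addition): with matching types,
         a ? 1 : 0 folds to a, and a ? 0 : 1 folds to !a.  */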
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert (tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert (tem_type, tem);
                }
              else
                tem = NULL;
            }

          if (tem)
            return fold_convert (type,
                                 fold_build2 (BIT_AND_EXPR,
                                              TREE_TYPE (tem), tem,
                                              fold_convert (TREE_TYPE (tem),
                                                            arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2 (BIT_AND_EXPR, type,
                                TREE_OPERAND (tem, 0), arg1);
        }
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
                            fold_convert (type, arg0),
                            arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (arg0);
          if (tem)
            return fold_build2 (TRUTH_ORIF_EXPR, type,
                                fold_convert (type, tem),
                                arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (arg0);
          if (tem)
            return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                fold_convert (type, tem),
                                op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2 (TRUTH_ORIF_EXPR, type,
                            fold_convert (type, arg0),
                            op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
          && type == TREE_TYPE (TREE_TYPE (arg0))
          && host_integerp (arg1, 1)
          && host_integerp (op2, 1))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (width != 0
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
              && (idx % width) == 0
              && (idx = idx / width)
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              tree elements = NULL_TREE;

              if (TREE_CODE (arg0) == VECTOR_CST)
                elements = TREE_VECTOR_CST_ELTS (arg0);
              else
                {
                  unsigned HOST_WIDE_INT idx;
                  tree value;

                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0),
                                              idx, value)
                    elements = tree_cons (NULL_TREE, value, elements);
                }
              while (idx-- > 0 && elements)
                elements = TREE_CHAIN (elements);
              if (elements)
                return TREE_VALUE (elements);
              else
                return fold_convert (type, integer_zero_node);
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */
#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary (code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary (code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary (code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
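
/* Illustrative use via the fold_build1 macro from tree.h, which supplies
   the MEM_STAT argument (a sketch, not part of the checking machinery):

     tree neg = fold_build1 (NEGATE_EXPR, integer_type_node,
			     build_int_cst (integer_type_node, 7));

   NEG is the INTEGER_CST -7; for a non-constant operand the call falls
   back to building a NEGATE_EXPR node.  */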
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1,
		  tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer (tree type, tree fn,
				   int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array (type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
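
/* Sketch of why the initializer variants exist: with -ftrapv or
   -ftrapping-math, fold must keep expressions whose run-time evaluation
   could trap, but a static initializer is evaluated at compile time, so
   e.g. fold_build2_initializer may fold an expression where the plain
   fold_build2 would preserve the original node.  */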
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
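
/* Concrete cases of the rules above, written schematically (TYPE a
   32-bit integer type, I and J arbitrary expressions of that type):

     multiple_of_p (type, J * 8, 8)   -> 1  (MULT_EXPR: operand 8 qualifies)
     multiple_of_p (type, I << 3, 8)  -> 1  (LSHIFT_EXPR: 1 << 3 == 8)
     multiple_of_p (type, I + 4, 8)   -> 0  (PLUS_EXPR needs both operands)  */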
/* Return true if `t' is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (t == error_mark_node)
    return false;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if ((TREE_CODE (t) == MODIFY_EXPR
	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	    && GENERIC_TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SCALB):
	    CASE_FLT_FN (BUILT_IN_SCALBLN):
	    CASE_FLT_FN (BUILT_IN_SCALBN):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						     strict_overflow_p)
		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
							 strict_overflow_p)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
		{
		  tree arg1 = CALL_EXPR_ARG (t, 1);
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
						    strict_overflow_p);

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      {
	tree type = TREE_TYPE (t);
	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
	    && truth_value_p (TREE_CODE (t)))
	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
	     have a signed:1 type (where the value is -1 and 0).  */
	  return true;
      }
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
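
/* E.g. tree_expr_nonnegative_p is trivially true for any unsigned
   expression, true for ABS_EXPR <x> of a signed integral x only by
   assuming signed overflow is undefined (which engages the warning
   machinery above), and true for x * x in floating point; for a bare
   signed variable it conservatively returns false.  */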
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  bool sub_strict_overflow_p;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case INTEGER_CST:
      return !integer_zerop (t);

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					      strict_overflow_p));
      }

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  break;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	break;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
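
/* E.g. tree_expr_nonzero_p is true for &decl when DECL is not weak, for
   any nonzero INTEGER_CST, and for x + y when both operands are known
   non-negative, one of them nonzero, and signed overflow is undefined.  */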
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
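
/* E.g. for the GENERIC form of "abc"[1] this returns the INTEGER_CST 98
   ('b'), since the string is a STRING_CST whose elements have a
   single-byte integer mode, as checked above.  */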
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  {
	    TREE_OVERFLOW (t) = 1;
	    TREE_CONSTANT_OVERFLOW (t) = 1;
	  }
	else if (TREE_CONSTANT_OVERFLOW (arg0))
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
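
/* E.g. the constant comparison 3 > 2 is evaluated by swapping to 2 < 3
   (result 1), and 3 >= 2 by computing 3 < 2 (result 0) and inverting.  */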
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has no side effects.  If either has none set, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check
     the left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop (PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop (BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
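
/* E.g. for a power-of-two divisor, round_up (size_int (13), 8) reduces
   to the bit manipulation above, (13 + 7) & ~7 == 16; a divisor of 6
   would instead go through the CEIL_DIV_EXPR/MULT_EXPR pair.  */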
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
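
/* E.g. given char A[10], the addresses &A[7] and &A[3] share the core
   &A, so ptr_difference_const stores 4 in *DIFF and returns true; if
   either offset were non-constant it would return false.  */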
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr (get_callee_fndecl (exp), 1, arg0);