/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
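
/* Illustrative sketch, assuming a 64-bit HOST_WIDE_INT with 2's complement
   wrap-around (values below are hypothetical): OVERFLOW_SUM_SIGN is nonzero
   exactly when the addends agree in sign but the wrapped sum does not.  */
#if 0
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << 62;
  HOST_WIDE_INT b = (HOST_WIDE_INT) 1 << 62;
  HOST_WIDE_INT sum = a + b;		/* wraps to a negative value */
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));
  gcc_assert (!OVERFLOW_SUM_SIGN (a, -b, a + -b));   /* mixed signs never overflow */
#endif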
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
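
/* Illustrative sketch, assuming a 64-bit HOST_WIDE_INT: BASE is then 2^32
   and any word splits into two 32-bit "digits" satisfying
   x == LOWPART (x) + HIGHPART (x) * BASE.  The value is hypothetical.  */
#if 0
  unsigned HOST_WIDE_INT x = 0x123456789abcdef0ULL;
  /* LOWPART (x) == 0x9abcdef0 and HIGHPART (x) == 0x12345678.  */
  gcc_assert (x == LOWPART (x) + HIGHPART (x) * BASE);
#endif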
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
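
/* Illustrative sketch (hypothetical values): encode and decode are inverses,
   so unpacking a doubleword into four half-word digits and repacking it
   reproduces the original pieces.  */
#if 0
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo = 0xdeadbeef, lo2;
  HOST_WIDE_INT hi = 42, hi2;

  encode (words, lo, hi);	/* four half-word digits */
  decode (words, &lo2, &hi2);	/* back to two HOST_WIDE_INT pieces */
  gcc_assert (lo2 == lo && hi2 == hi);
#endif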
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
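
/* Worked example (illustrative, with hypothetical values): forcing the value
   0x101 into an 8-bit unsigned type clears the bits above bit 7 and yields 1,
   while forcing 0xff into an 8-bit signed type sign-extends bit 7 and yields
   -1; TREE_OVERFLOW is then set only as directed by OVERFLOWABLE and the
   OVERFLOWED/CONST_OVERFLOWED flags passed in.  */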
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
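
/* Illustrative sketch of the calling convention, with hypothetical values:
   each doubleword is passed as a low/high pair and the return value reports
   signed overflow.  */
#if 0
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* 5 + 7 == 12 with no overflow; the result pair is (12, 0).  */
  gcc_assert (!add_double (5, 0, 7, 0, &lv, &hv) && lv == 12 && hv == 0);
#endif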
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      break;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
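
/* Illustrative sketch with hypothetical values: only the rounding of a
   nonzero remainder differs between the division codes.  For -7 / 2 signed,
   TRUNC_DIV_EXPR gives quotient -3, remainder -1; FLOOR_DIV_EXPR gives -4,
   remainder 1; CEIL_DIV_EXPR gives -3, remainder -1; and ROUND_DIV_EXPR
   gives -4, remainder 1 (the tie -3.5 rounds away from zero).  */
#if 0
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (ROUND_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  gcc_assert (hquo == -1 && lquo == (unsigned HOST_WIDE_INT) -4
	      && hrem == 0 && lrem == 1);
#endif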
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
	  || (flag_wrapv && ! flag_trapv))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
	     && (TYPE_UNSIGNED (type)
		 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
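
/* Illustrative sketch with a hypothetical tree IN for the expression x + 3:
   splitting it with CODE == PLUS_EXPR returns x as the variable part, stores
   the INTEGER_CST 3 in *LITP and leaves *CONP and *MINUS_LITP null; for
   x - 3 the literal lands in *MINUS_LITP instead.  */
#if 0
  tree var, con, lit, minus_lit;

  var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* var == x, lit == 3, con == NULL_TREE, minus_lit == NULL_TREE.  */
#endif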
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t = NULL_TREE;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),

      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same type integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }

  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {

    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:

    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;

    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case UNORDERED_EXPR:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {

    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;

    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {

    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;

      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;

  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);

  return fold_build2 (compcode_to_comparison (compcode),
		      truth_type, ll_arg, lr_arg);
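/* Example (added for illustration, not from the original source): because
   the compcode values form a bit-based encoding, combining two comparisons
   on the same operands reduces to a bit operation on their codes.  With
   integer operands (so NaNs and trapping are not a concern),
   "a < b || a == b" has lcompcode COMPCODE_LT and rcompcode COMPCODE_EQ;
   OR-ing them gives COMPCODE_LE, so the expression is rebuilt as "a <= b".
   AND-ing the same pair gives COMPCODE_FALSE, i.e. a constant false node.  */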
/* Return nonzero if CODE is a tree code that represents a truth value.  */

truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2447 /* Return nonzero if two operands (typically of the same tree node)
2448 are necessarily equal. If either argument has side-effects this
2449 function returns zero. FLAGS modifies behavior as follows:
2451 If OEP_ONLY_CONST is set, only return nonzero for constants.
2452 This function tests whether the operands are indistinguishable;
2453 it does not test whether they are equal using C's == operation.
2454 The distinction is important for IEEE floating point, because
2455 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2456 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2458 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2459 even though it may hold multiple values during a function.
2460 This is because a GCC tree node guarantees that nothing else is
2461 executed between the evaluation of its "operands" (which may often
2462 be evaluated in arbitrary order). Hence if the operands themselves
2463 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2464 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2465 unset means assuming isochronic (or instantaneous) tree equivalence.
2466 Unless comparing arbitrary expression trees, such as from different
2467 statements, this flag can usually be left unset.
2469 If OEP_PURE_SAME is set, then pure functions with identical arguments
2470 are considered the same. It is used when the caller has other ways
2471 to ensure that global memory is unchanged in between. */
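/* Example (added for illustration, not from the original source): by default
   a VAR_DECL compares equal to itself, so for side-effect-free operands an
   expression like "x + y" compares equal to "y + x" via the commutative
   handling further below; passing OEP_ONLY_CONST instead restricts a
   nonzero answer to constants.  */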
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));
2550 if (TREE_CONSTANT_OVERFLOW (arg0
)
2551 || TREE_CONSTANT_OVERFLOW (arg1
))
2554 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2555 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2558 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2561 v1
= TREE_CHAIN (v1
);
2562 v2
= TREE_CHAIN (v2
);
2569 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2571 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2575 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2576 && ! memcmp (TREE_STRING_POINTER (arg0
),
2577 TREE_STRING_POINTER (arg1
),
2578 TREE_STRING_LENGTH (arg0
)));
2581 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2587 if (flags
& OEP_ONLY_CONST
)
/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
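/* Example (added for illustration, not from the original source): OP_SAME (0)
   simply compares the first operands of ARG0 and ARG1 recursively, while
   OP_SAME_WITH_NULL also accepts the operand being absent in both trees; the
   array-reference case below relies on this because operands 2 and 3 of an
   ARRAY_REF/ARRAY_RANGE_REF may be null.  */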
2601 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2604 /* Two conversions are equal only if signedness and modes match. */
2605 switch (TREE_CODE (arg0
))
2610 case FIX_TRUNC_EXPR
:
2611 case FIX_FLOOR_EXPR
:
2612 case FIX_ROUND_EXPR
:
2613 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2614 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));
2637 /* If either of the pointer (or reference) expressions we are
2638 dereferencing contain a side effect, these cannot be equal. */
2639 if (TREE_SIDE_EFFECTS (arg0
)
2640 || TREE_SIDE_EFFECTS (arg1
))
2643 switch (TREE_CODE (arg0
))
2646 case ALIGN_INDIRECT_REF
:
2647 case MISALIGNED_INDIRECT_REF
:
2653 case ARRAY_RANGE_REF
:
2654 /* Operands 2 and 3 may be null. */
2657 && OP_SAME_WITH_NULL (2)
2658 && OP_SAME_WITH_NULL (3));
2661 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2662 may be NULL when we're called to compare MEM_EXPRs. */
2663 return OP_SAME_WITH_NULL (0)
2665 && OP_SAME_WITH_NULL (2);
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2674 case tcc_expression
:
2675 switch (TREE_CODE (arg0
))
2678 case TRUTH_NOT_EXPR
:
2681 case TRUTH_ANDIF_EXPR
:
2682 case TRUTH_ORIF_EXPR
:
2683 return OP_SAME (0) && OP_SAME (1);
2685 case TRUTH_AND_EXPR
:
2687 case TRUTH_XOR_EXPR
:
2688 if (OP_SAME (0) && OP_SAME (1))
2691 /* Otherwise take into account this is a commutative operation. */
2692 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2693 TREE_OPERAND (arg1
, 1), flags
)
2694 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2695 TREE_OPERAND (arg1
, 0), flags
));
2698 /* If the CALL_EXPRs call different functions, then they
2699 clearly can not be equal. */
2704 unsigned int cef
= call_expr_flags (arg0
);
2705 if (flags
& OEP_PURE_SAME
)
2706 cef
&= ECF_CONST
| ECF_PURE
;
2713 /* Now see if all the arguments are the same. operand_equal_p
2714 does not handle TREE_LIST, so we walk the operands here
2715 feeding them to operand_equal_p. */
2716 arg0
= TREE_OPERAND (arg0
, 1);
2717 arg1
= TREE_OPERAND (arg1
, 1);
2718 while (arg0
&& arg1
)
2720 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2724 arg0
= TREE_CHAIN (arg0
);
2725 arg1
= TREE_CHAIN (arg1
);
2728 /* If we get here and both argument lists are exhausted
2729 then the CALL_EXPRs are equal. */
2730 return ! (arg0
|| arg1
);
2736 case tcc_declaration
:
2737 /* Consider __builtin_sqrt equal to sqrt. */
2738 return (TREE_CODE (arg0
) == FUNCTION_DECL
2739 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2740 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2741 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2748 #undef OP_SAME_WITH_NULL
2751 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2752 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2754 When in doubt, return 0. */
2757 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2759 int unsignedp1
, unsignedpo
;
2760 tree primarg0
, primarg1
, primother
;
2761 unsigned int correct_width
;
2763 if (operand_equal_p (arg0
, arg1
, 0))
2766 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2767 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2770 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2771 and see if the inner values are the same. This removes any
2772 signedness comparison, which doesn't matter here. */
2773 primarg0
= arg0
, primarg1
= arg1
;
2774 STRIP_NOPS (primarg0
);
2775 STRIP_NOPS (primarg1
);
2776 if (operand_equal_p (primarg0
, primarg1
, 0))
2779 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2780 actual comparison operand, ARG0.
2782 First throw away any conversions to wider types
2783 already present in the operands. */
2785 primarg1
= get_narrower (arg1
, &unsignedp1
);
2786 primother
= get_narrower (other
, &unsignedpo
);
2788 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2789 if (unsignedp1
== unsignedpo
2790 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2791 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2793 tree type
= TREE_TYPE (arg0
);
2795 /* Make sure shorter operand is extended the right way
2796 to match the longer operand. */
2797 primarg1
= fold_convert (lang_hooks
.types
.signed_or_unsigned_type
2798 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2800 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2807 /* See if ARG is an expression that is either a comparison or is performing
2808 arithmetic on comparisons. The comparisons must only be comparing
2809 two different values, which will be stored in *CVAL1 and *CVAL2; if
2810 they are nonzero it means that some operands have already been found.
2811 No variables may be used anywhere else in the expression except in the
2812 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2813 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2815 If this is true, return 1. Otherwise, return zero. */
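/* Example (added for illustration, not from the original source): for an
   expression such as "x < y || x == y", the walk below records x and y in
   *CVAL1 and *CVAL2 and returns 1, since no variable occurs outside the two
   comparisons; "x < y || z == 1" fails because a third value would be
   needed.  */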
2818 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2820 enum tree_code code
= TREE_CODE (arg
);
2821 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2823 /* We can handle some of the tcc_expression cases here. */
2824 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2826 else if (class == tcc_expression
2827 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2828 || code
== COMPOUND_EXPR
))
2831 else if (class == tcc_expression
&& code
== SAVE_EXPR
2832 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
2846 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2849 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2850 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2851 cval1
, cval2
, save_p
));
2856 case tcc_expression
:
2857 if (code
== COND_EXPR
)
2858 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2859 cval1
, cval2
, save_p
)
2860 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2861 cval1
, cval2
, save_p
)
2862 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2863 cval1
, cval2
, save_p
));
2866 case tcc_comparison
:
2867 /* First see if we can handle the first operand, then the second. For
2868 the second operand, we know *CVAL1 can't be zero. It must be that
2869 one side of the comparison is each of the values; test for the
2870 case where this isn't true by failing if the two operands
2873 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2874 TREE_OPERAND (arg
, 1), 0))
2878 *cval1
= TREE_OPERAND (arg
, 0);
2879 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2881 else if (*cval2
== 0)
2882 *cval2
= TREE_OPERAND (arg
, 0);
2883 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2888 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2890 else if (*cval2
== 0)
2891 *cval2
= TREE_OPERAND (arg
, 1);
2892 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2904 /* ARG is a tree that is known to contain just arithmetic operations and
2905 comparisons. Evaluate the operations in the tree substituting NEW0 for
2906 any occurrence of OLD0 as an operand of a comparison and likewise for
2910 eval_subst (tree arg
, tree old0
, tree new0
, tree old1
, tree new1
)
2912 tree type
= TREE_TYPE (arg
);
2913 enum tree_code code
= TREE_CODE (arg
);
2914 enum tree_code_class
class = TREE_CODE_CLASS (code
);
2916 /* We can handle some of the tcc_expression cases here. */
2917 if (class == tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2919 else if (class == tcc_expression
2920 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2926 return fold_build1 (code
, type
,
2927 eval_subst (TREE_OPERAND (arg
, 0),
2928 old0
, new0
, old1
, new1
));
2931 return fold_build2 (code
, type
,
2932 eval_subst (TREE_OPERAND (arg
, 0),
2933 old0
, new0
, old1
, new1
),
2934 eval_subst (TREE_OPERAND (arg
, 1),
2935 old0
, new0
, old1
, new1
));
2937 case tcc_expression
:
2941 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
2944 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
2947 return fold_build3 (code
, type
,
2948 eval_subst (TREE_OPERAND (arg
, 0),
2949 old0
, new0
, old1
, new1
),
2950 eval_subst (TREE_OPERAND (arg
, 1),
2951 old0
, new0
, old1
, new1
),
2952 eval_subst (TREE_OPERAND (arg
, 2),
2953 old0
, new0
, old1
, new1
));
2957 /* Fall through - ??? */
2959 case tcc_comparison
:
2961 tree arg0
= TREE_OPERAND (arg
, 0);
2962 tree arg1
= TREE_OPERAND (arg
, 1);
2964 /* We need to check both for exact equality and tree equality. The
2965 former will be true if the operand has a side-effect. In that
2966 case, we know the operand occurred exactly once. */
2968 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2970 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2973 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2975 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2978 return fold_build2 (code
, type
, arg0
, arg1
);
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
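/* Example (added for illustration, not from the original source): when fold
   turns "f () * 0" into 0, the call cannot simply be dropped, so
   omit_one_operand (type, integer_zero_node, call) produces
   COMPOUND_EXPR <f (), 0>, evaluating the call for its side effects while
   still yielding the constant result.  */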
3040 /* Return a simplified tree node for the truth-negation of ARG. This
3041 never alters ARG itself. We assume that ARG is an operation that
3042 returns a truth value (0 or 1).
3044 FIXME: one would think we would fold the result, but it causes
3045 problems with the dominator optimizer. */
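/* Example (added for illustration, not from the original source): the cases
   below apply De Morgan's laws among other rewrites; e.g. the
   TRUTH_ANDIF_EXPR case turns !(a && b) into (!a) || (!b), and a comparison
   is simply inverted when that is safe for its operand types.  */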
3048 fold_truth_not_expr (tree arg
)
3050 tree type
= TREE_TYPE (arg
);
3051 enum tree_code code
= TREE_CODE (arg
);
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3059 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3060 if (FLOAT_TYPE_P (op_type
)
3061 && flag_trapping_math
3062 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3063 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3067 code
= invert_tree_comparison (code
,
3068 HONOR_NANS (TYPE_MODE (op_type
)));
3069 if (code
== ERROR_MARK
)
3072 return build2 (code
, type
,
3073 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
3080 return constant_boolean_node (integer_zerop (arg
), type
);
3082 case TRUTH_AND_EXPR
:
3083 return build2 (TRUTH_OR_EXPR
, type
,
3084 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3085 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3088 return build2 (TRUTH_AND_EXPR
, type
,
3089 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3090 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3092 case TRUTH_XOR_EXPR
:
3093 /* Here we can invert either operand. We invert the first operand
3094 unless the second operand is a TRUTH_NOT_EXPR in which case our
3095 result is the XOR of the first operand with the inside of the
3096 negation of the second operand. */
3098 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3099 return build2 (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3100 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3102 return build2 (TRUTH_XOR_EXPR
, type
,
3103 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3104 TREE_OPERAND (arg
, 1));
3106 case TRUTH_ANDIF_EXPR
:
3107 return build2 (TRUTH_ORIF_EXPR
, type
,
3108 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3109 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3111 case TRUTH_ORIF_EXPR
:
3112 return build2 (TRUTH_ANDIF_EXPR
, type
,
3113 invert_truthvalue (TREE_OPERAND (arg
, 0)),
3114 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3116 case TRUTH_NOT_EXPR
:
3117 return TREE_OPERAND (arg
, 0);
3121 tree arg1
= TREE_OPERAND (arg
, 1);
3122 tree arg2
= TREE_OPERAND (arg
, 2);
3123 /* A COND_EXPR may have a throw as one operand, which
3124 then has void type. Just leave void operands
3126 return build3 (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3127 VOID_TYPE_P (TREE_TYPE (arg1
))
3128 ? arg1
: invert_truthvalue (arg1
),
3129 VOID_TYPE_P (TREE_TYPE (arg2
))
3130 ? arg2
: invert_truthvalue (arg2
));
3134 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3135 invert_truthvalue (TREE_OPERAND (arg
, 1)));
3137 case NON_LVALUE_EXPR
:
3138 return invert_truthvalue (TREE_OPERAND (arg
, 0));
3141 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3142 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3146 return build1 (TREE_CODE (arg
), type
,
3147 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3150 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3152 return build2 (EQ_EXPR
, type
, arg
,
3153 build_int_cst (type
, 0));
3156 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
3158 case CLEANUP_POINT_EXPR
:
3159 return build1 (CLEANUP_POINT_EXPR
, type
,
3160 invert_truthvalue (TREE_OPERAND (arg
, 0)));
3169 /* Return a simplified tree node for the truth-negation of ARG. This
3170 never alters ARG itself. We assume that ARG is an operation that
3171 returns a truth value (0 or 1).
3173 FIXME: one would think we would fold the result, but it causes
3174 problems with the dominator optimizer. */
3177 invert_truthvalue (tree arg
)
3181 if (TREE_CODE (arg
) == ERROR_MARK
)
3184 tem
= fold_truth_not_expr (arg
);
3186 tem
= build1 (TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3191 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3192 operands are another bit-wise operation with a common input. If so,
3193 distribute the bit operations to save an operation and possibly two if
3194 constants are involved. For example, convert
3195 (A | B) & (A | C) into A | (B & C)
3196 Further simplification will occur if B and C are constants.
3198 If this optimization cannot be done, 0 will be returned. */
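/* Example (added for illustration, not from the original source): with
   constant operands the saving is concrete, e.g. (A | 4) & (A | 1)
   distributes to A | (4 & 1), which further folds to just A.  */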
3201 distribute_bit_expr (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3206 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3207 || TREE_CODE (arg0
) == code
3208 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3209 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3212 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3214 common
= TREE_OPERAND (arg0
, 0);
3215 left
= TREE_OPERAND (arg0
, 1);
3216 right
= TREE_OPERAND (arg1
, 1);
3218 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3220 common
= TREE_OPERAND (arg0
, 0);
3221 left
= TREE_OPERAND (arg0
, 1);
3222 right
= TREE_OPERAND (arg1
, 0);
3224 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3226 common
= TREE_OPERAND (arg0
, 1);
3227 left
= TREE_OPERAND (arg0
, 0);
3228 right
= TREE_OPERAND (arg1
, 1);
3230 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3232 common
= TREE_OPERAND (arg0
, 1);
3233 left
= TREE_OPERAND (arg0
, 0);
3234 right
= TREE_OPERAND (arg1
, 0);
3239 return fold_build2 (TREE_CODE (arg0
), type
, common
,
3240 fold_build2 (code
, type
, left
, right
));
3243 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3244 with code CODE. This optimization is unsafe. */
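/* Example (added for illustration, not from the original source): the two
   rewrites below are (A / C) + (B / C) -> (A + B) / C and
   (A / C1) + (A / C2) -> A * (1/C1 + 1/C2); both can change the rounding of
   intermediate results, which is why the comment above marks the
   transformation as unsafe.  */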
3246 distribute_real_division (enum tree_code code
, tree type
, tree arg0
, tree arg1
)
3248 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3249 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3251 /* (A / C) +- (B / C) -> (A +- B) / C. */
3253 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3254 TREE_OPERAND (arg1
, 1), 0))
3255 return fold_build2 (mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3256 fold_build2 (code
, type
,
3257 TREE_OPERAND (arg0
, 0),
3258 TREE_OPERAND (arg1
, 0)),
3259 TREE_OPERAND (arg0
, 1));
3261 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3262 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3263 TREE_OPERAND (arg1
, 0), 0)
3264 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3265 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3267 REAL_VALUE_TYPE r0
, r1
;
3268 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3269 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3271 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3273 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3274 real_arithmetic (&r0
, code
, &r0
, &r1
);
3275 return fold_build2 (MULT_EXPR
, type
,
3276 TREE_OPERAND (arg0
, 0),
3277 build_real (type
, r0
));
3283 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3284 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3287 make_bit_field_ref (tree inner
, tree type
, int bitsize
, int bitpos
,
3294 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3295 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3296 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3297 && host_integerp (size
, 0)
3298 && tree_low_cst (size
, 0) == bitsize
)
3299 return fold_convert (type
, inner
);
3302 result
= build3 (BIT_FIELD_REF
, type
, inner
,
3303 size_int (bitsize
), bitsize_int (bitpos
));
3305 BIT_FIELD_REF_UNSIGNED (result
) = unsignedp
;
3310 /* Optimize a bit-field compare.
3312 There are two cases: First is a compare against a constant and the
3313 second is a comparison of two items where the fields are at the same
3314 bit position relative to the start of a chunk (byte, halfword, word)
3315 large enough to contain it. In these cases we can avoid the shift
3316 implicit in bitfield extractions.
3318 For constants, we emit a compare of the shifted constant with the
3319 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3320 compared. For two fields at the same position, we do the ANDs with the
3321 similar mask and compare the result of the ANDs.
3323 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3324 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3325 are the left and right operands of the comparison, respectively.
3327 If the optimization described above can be done, we return the resulting
3328 tree. Otherwise we return zero. */
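/* Example (added for illustration, not from the original source): for a
   declaration like "struct { unsigned f : 3; } s;", a test such as
   "s.f == 5" can be done by masking the containing word and comparing it
   against the constant shifted into the field's position, avoiding the
   extract-and-shift sequence a bit-field read would otherwise need.  */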
3331 optimize_bit_field_compare (enum tree_code code
, tree compare_type
,
3334 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3335 tree type
= TREE_TYPE (lhs
);
3336 tree signed_type
, unsigned_type
;
3337 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3338 enum machine_mode lmode
, rmode
, nmode
;
3339 int lunsignedp
, runsignedp
;
3340 int lvolatilep
= 0, rvolatilep
= 0;
3341 tree linner
, rinner
= NULL_TREE
;
  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
3350 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3351 &lunsignedp
, &lvolatilep
, false);
3352 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3353 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3358 /* If this is not a constant, we can only do something if bit positions,
3359 sizes, and signedness are the same. */
3360 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3361 &runsignedp
, &rvolatilep
, false);
3363 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3364 || lunsignedp
!= runsignedp
|| offset
!= 0
3365 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3369 /* See if we can find a mode to refer to this field. We should be able to,
3370 but fail if we can't. */
3371 nmode
= get_best_mode (lbitsize
, lbitpos
,
3372 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3373 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3374 TYPE_ALIGN (TREE_TYPE (rinner
))),
3375 word_mode
, lvolatilep
|| rvolatilep
);
3376 if (nmode
== VOIDmode
)
3379 /* Set signed and unsigned types of the precision of this mode for the
3381 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3382 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3384 /* Compute the bit position and size for the new reference and our offset
3385 within it. If the new reference is the same size as the original, we
3386 won't optimize anything, so return zero. */
3387 nbitsize
= GET_MODE_BITSIZE (nmode
);
3388 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3390 if (nbitsize
== lbitsize
)
3393 if (BYTES_BIG_ENDIAN
)
3394 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3396 /* Make the mask to be used against the extracted field. */
3397 mask
= build_int_cst (unsigned_type
, -1);
3398 mask
= force_fit_type (mask
, 0, false, false);
3399 mask
= fold_convert (unsigned_type
, mask
);
3400 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
), 0);
3401 mask
= const_binop (RSHIFT_EXPR
, mask
,
3402 size_int (nbitsize
- lbitsize
- lbitpos
), 0);
3405 /* If not comparing with constant, just rework the comparison
3407 return build2 (code
, compare_type
,
3408 build2 (BIT_AND_EXPR
, unsigned_type
,
3409 make_bit_field_ref (linner
, unsigned_type
,
3410 nbitsize
, nbitpos
, 1),
3412 build2 (BIT_AND_EXPR
, unsigned_type
,
3413 make_bit_field_ref (rinner
, unsigned_type
,
3414 nbitsize
, nbitpos
, 1),
  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */
3428 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3429 fold_convert (unsigned_type
, rhs
),
3430 size_int (lbitsize
), 0)))
3432 warning (0, "comparison is always %d due to width of bit-field",
3434 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3439 tree tem
= const_binop (RSHIFT_EXPR
, fold_convert (signed_type
, rhs
),
3440 size_int (lbitsize
- 1), 0);
3441 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3443 warning (0, "comparison is always %d due to width of bit-field",
3445 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3449 /* Single-bit compares should always be against zero. */
3450 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3452 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3453 rhs
= build_int_cst (type
, 0);
3456 /* Make a new bitfield reference, shift the constant over the
3457 appropriate number of bits and mask it with the computed mask
3458 (in case this was a signed field). If we changed it, make a new one. */
3459 lhs
= make_bit_field_ref (linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3462 TREE_SIDE_EFFECTS (lhs
) = 1;
3463 TREE_THIS_VOLATILE (lhs
) = 1;
3466 rhs
= const_binop (BIT_AND_EXPR
,
3467 const_binop (LSHIFT_EXPR
,
3468 fold_convert (unsigned_type
, rhs
),
3469 size_int (lbitpos
), 0),
3472 return build2 (code
, compare_type
,
3473 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
3477 /* Subroutine for fold_truthop: decode a field reference.
3479 If EXP is a comparison reference, we return the innermost reference.
3481 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3482 set to the starting bit number.
3484 If the innermost field can be completely contained in a mode-sized
3485 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3487 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3488 otherwise it is not changed.
3490 *PUNSIGNEDP is set to the signedness of the field.
3492 *PMASK is set to the mask used. This is either contained in a
3493 BIT_AND_EXPR or derived from the width of the field.
3495 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3497 Return 0 if this is not a component reference or is one that we can't
3498 do anything with. */
3501 decode_field_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
3502 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3503 int *punsignedp
, int *pvolatilep
,
3504 tree
*pmask
, tree
*pand_mask
)
3506 tree outer_type
= 0;
3508 tree mask
, inner
, offset
;
3510 unsigned int precision
;
3512 /* All the optimizations using this function assume integer fields.
3513 There are problems with FP fields since the type_for_size call
3514 below can fail for, e.g., XFmode. */
3515 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3518 /* We are interested in the bare arrangement of bits, so strip everything
3519 that doesn't affect the machine mode. However, record the type of the
3520 outermost expression if it may matter below. */
3521 if (TREE_CODE (exp
) == NOP_EXPR
3522 || TREE_CODE (exp
) == CONVERT_EXPR
3523 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3524 outer_type
= TREE_TYPE (exp
);
3527 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3529 and_mask
= TREE_OPERAND (exp
, 1);
3530 exp
= TREE_OPERAND (exp
, 0);
3531 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3532 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3536 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3537 punsignedp
, pvolatilep
, false);
3538 if ((inner
== exp
&& and_mask
== 0)
3539 || *pbitsize
< 0 || offset
!= 0
3540 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3543 /* If the number of bits in the reference is the same as the bitsize of
3544 the outer type, then the outer type gives the signedness. Otherwise
3545 (in case of a small bitfield) the signedness is unchanged. */
3546 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3547 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3549 /* Compute the mask to access the bitfield. */
3550 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3551 precision
= TYPE_PRECISION (unsigned_type
);
3553 mask
= build_int_cst (unsigned_type
, -1);
3554 mask
= force_fit_type (mask
, 0, false, false);
3556 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3557 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
3559 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3561 mask
= fold_build2 (BIT_AND_EXPR
, unsigned_type
,
3562 fold_convert (unsigned_type
, and_mask
), mask
);
3565 *pand_mask
= and_mask
;
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

all_ones_mask_p (tree mask, int size)
3575 tree type
= TREE_TYPE (mask
);
3576 unsigned int precision
= TYPE_PRECISION (type
);
3579 tmask
= build_int_cst (lang_hooks
.types
.signed_type (type
), -1);
3580 tmask
= force_fit_type (tmask
, 0, false, false);
3583 tree_int_cst_equal (mask
,
3584 const_binop (RSHIFT_EXPR
,
3585 const_binop (LSHIFT_EXPR
, tmask
,
3586 size_int (precision
- size
),
3588 size_int (precision
- size
), 0));
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */
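/* Example (added for illustration, not from the original source): for a
   32-bit signed EXP, sign_bit_p returns EXP when VAL is the constant
   0x80000000; if EXP is itself an extension from a narrower type, the
   unextended operand is tested against VAL as well.  */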
3598 sign_bit_p (tree exp
, tree val
)
3600 unsigned HOST_WIDE_INT mask_lo
, lo
;
3601 HOST_WIDE_INT mask_hi
, hi
;
3605 /* Tree EXP must have an integral type. */
3606 t
= TREE_TYPE (exp
);
3607 if (! INTEGRAL_TYPE_P (t
))
3610 /* Tree VAL must be an integer constant. */
3611 if (TREE_CODE (val
) != INTEGER_CST
3612 || TREE_CONSTANT_OVERFLOW (val
))
3615 width
= TYPE_PRECISION (t
);
3616 if (width
> HOST_BITS_PER_WIDE_INT
)
3618 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3621 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3622 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3628 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3631 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3632 >> (HOST_BITS_PER_WIDE_INT
- width
));
3635 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3636 treat VAL as if it were unsigned. */
3637 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3638 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3641 /* Handle extension from a narrower type. */
3642 if (TREE_CODE (exp
) == NOP_EXPR
3643 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3644 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example,
	X == 2 || X == 3 || X == 4 || X == 5
   is converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
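/* Example (added for illustration, not from the original source): in this
   notation "X >= 2 && X <= 5" is the range "+ [2, 5]", and once the test is
   rewritten relative to the low bound it becomes the single comparison
   "(unsigned) (X - 2) <= 3" shown above.  */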
3702 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3703 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3704 and UPPER1_P are nonzero if the respective argument is an upper bound
3705 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3706 must be specified for a comparison. ARG1 will be converted to ARG0's
3707 type if both are specified. */
3710 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3711 tree arg1
, int upper1_p
)
3717 /* If neither arg represents infinity, do the normal operation.
3718 Else, if not a comparison, return infinity. Else handle the special
3719 comparison rules. Note that most of the cases below won't occur, but
3720 are handled for consistency. */
3722 if (arg0
!= 0 && arg1
!= 0)
3724 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3725 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3727 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3730 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3733 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3734 for neither. In real maths, we cannot assume open ended ranges are
3735 the same. But, this is computer arithmetic, where numbers are finite.
3736 We can therefore make the transformation of any unbounded range with
3737 the value Z, Z being greater than any representable number. This permits
3738 us to treat unbounded ranges as equal. */
3739 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3740 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3744 result
= sgn0
== sgn1
;
3747 result
= sgn0
!= sgn1
;
3750 result
= sgn0
< sgn1
;
3753 result
= sgn0
<= sgn1
;
3756 result
= sgn0
> sgn1
;
3759 result
= sgn0
>= sgn1
;
3765 return constant_boolean_node (result
, type
);
3768 /* Given EXP, a logical expression, set the range it is testing into
3769 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3770 actually being tested. *PLOW and *PHIGH will be made of the same type
3771 as the returned expression. If EXP is not a comparison, we will most
3772 likely not be returning a useful value and range. */
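/* Example (added for illustration, not from the original source): for EXP of
   the form "X > 10", the GT_EXPR case below records the range "- [-, 10]",
   i.e. X lies outside the range from the type's minimum up to 10, using the
   same notation as the overview comment above.  */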
3775 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
)
3777 enum tree_code code
;
3778 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
3779 tree exp_type
= NULL_TREE
, arg0_type
= NULL_TREE
;
3781 tree low
, high
, n_low
, n_high
;
3783 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3784 and see if we can refine the range. Some of the cases below may not
3785 happen, but it doesn't seem worth worrying about this. We "continue"
3786 the outer loop when we've changed something; otherwise we "break"
3787 the switch, which will "break" the while. */
3790 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
3794 code
= TREE_CODE (exp
);
3795 exp_type
= TREE_TYPE (exp
);
3797 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
3799 if (TREE_CODE_LENGTH (code
) > 0)
3800 arg0
= TREE_OPERAND (exp
, 0);
3801 if (TREE_CODE_CLASS (code
) == tcc_comparison
3802 || TREE_CODE_CLASS (code
) == tcc_unary
3803 || TREE_CODE_CLASS (code
) == tcc_binary
)
3804 arg0_type
= TREE_TYPE (arg0
);
3805 if (TREE_CODE_CLASS (code
) == tcc_binary
3806 || TREE_CODE_CLASS (code
) == tcc_comparison
3807 || (TREE_CODE_CLASS (code
) == tcc_expression
3808 && TREE_CODE_LENGTH (code
) > 1))
3809 arg1
= TREE_OPERAND (exp
, 1);
3814 case TRUTH_NOT_EXPR
:
3815 in_p
= ! in_p
, exp
= arg0
;
3818 case EQ_EXPR
: case NE_EXPR
:
3819 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3820 /* We can only do something if the range is testing for zero
3821 and if the second operand is an integer constant. Note that
3822 saying something is "in" the range we make is done by
3823 complementing IN_P since it will set in the initial case of
3824 being not equal to zero; "out" is leaving it alone. */
3825 if (low
== 0 || high
== 0
3826 || ! integer_zerop (low
) || ! integer_zerop (high
)
3827 || TREE_CODE (arg1
) != INTEGER_CST
)
3832 case NE_EXPR
: /* - [c, c] */
3835 case EQ_EXPR
: /* + [c, c] */
3836 in_p
= ! in_p
, low
= high
= arg1
;
3838 case GT_EXPR
: /* - [-, c] */
3839 low
= 0, high
= arg1
;
3841 case GE_EXPR
: /* + [c, -] */
3842 in_p
= ! in_p
, low
= arg1
, high
= 0;
3844 case LT_EXPR
: /* - [c, -] */
3845 low
= arg1
, high
= 0;
3847 case LE_EXPR
: /* + [-, c] */
3848 in_p
= ! in_p
, low
= 0, high
= arg1
;
3854 /* If this is an unsigned comparison, we also know that EXP is
3855 greater than or equal to zero. We base the range tests we make
3856 on that fact, so we record it here so we can parse existing
3857 range tests. We test arg0_type since often the return type
3858 of, e.g. EQ_EXPR, is boolean. */
3859 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3861 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3863 build_int_cst (arg0_type
, 0),
3867 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3869 /* If the high bound is missing, but we have a nonzero low
3870 bound, reverse the range so it goes from zero to the low bound
3872 if (high
== 0 && low
&& ! integer_zerop (low
))
3875 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3876 integer_one_node
, 0);
3877 low
= build_int_cst (arg0_type
, 0);
3885 /* (-x) IN [a,b] -> x in [-b, -a] */
3886 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3887 build_int_cst (exp_type
, 0),
3889 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3890 build_int_cst (exp_type
, 0),
3892 low
= n_low
, high
= n_high
;
3898 exp
= build2 (MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3899 build_int_cst (exp_type
, 1));
3902 case PLUS_EXPR
: case MINUS_EXPR
:
3903 if (TREE_CODE (arg1
) != INTEGER_CST
)
3906 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3907 move a constant to the other side. */
3908 if (flag_wrapv
&& !TYPE_UNSIGNED (arg0_type
))
3911 /* If EXP is signed, any overflow in the computation is undefined,
3912 so we don't worry about it so long as our computations on
3913 the bounds don't overflow. For unsigned, overflow is defined
3914 and this is exactly the right thing. */
3915 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3916 arg0_type
, low
, 0, arg1
, 0);
3917 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3918 arg0_type
, high
, 1, arg1
, 0);
3919 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3920 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3923 /* Check for an unsigned range which has wrapped around the maximum
3924 value thus making n_high < n_low, and normalize it. */
3925 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3927 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3928 integer_one_node
, 0);
3929 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3930 integer_one_node
, 0);
3932 /* If the range is of the form +/- [ x+1, x ], we won't
3933 be able to normalize it. But then, it represents the
3934 whole range or the empty set, so make it
3936 if (tree_int_cst_equal (n_low
, low
)
3937 && tree_int_cst_equal (n_high
, high
))
3943 low
= n_low
, high
= n_high
;
3948 case NOP_EXPR
: case NON_LVALUE_EXPR
: case CONVERT_EXPR
:
3949 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3952 if (! INTEGRAL_TYPE_P (arg0_type
)
3953 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3954 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3957 n_low
= low
, n_high
= high
;
3960 n_low
= fold_convert (arg0_type
, n_low
);
3963 n_high
= fold_convert (arg0_type
, n_high
);
3966 /* If we're converting arg0 from an unsigned type, to exp,
3967 a signed type, we will be doing the comparison as unsigned.
3968 The tests above have already verified that LOW and HIGH
3971 So we have to ensure that we will handle large unsigned
3972 values the same way that the current signed bounds treat
3975 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
3978 tree equiv_type
= lang_hooks
.types
.type_for_mode
3979 (TYPE_MODE (arg0_type
), 1);
3981 /* A range without an upper bound is, naturally, unbounded.
3982 Since convert would have cropped a very large value, use
3983 the max value for the destination type. */
3985 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
3986 : TYPE_MAX_VALUE (arg0_type
);
3988 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
3989 high_positive
= fold_build2 (RSHIFT_EXPR
, arg0_type
,
3990 fold_convert (arg0_type
,
3992 fold_convert (arg0_type
,
3995 /* If the low bound is specified, "and" the range with the
3996 range for which the original unsigned value will be
4000 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4001 1, n_low
, n_high
, 1,
4002 fold_convert (arg0_type
,
4007 in_p
= (n_in_p
== in_p
);
4011 /* Otherwise, "or" the range with the range of the input
4012 that will be interpreted as negative. */
4013 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4014 0, n_low
, n_high
, 1,
4015 fold_convert (arg0_type
,
4020 in_p
= (in_p
!= n_in_p
);
4025 low
= n_low
, high
= n_high
;
4035 /* If EXP is a constant, we can evaluate whether this is true or false. */
4036 if (TREE_CODE (exp
) == INTEGER_CST
)
4038 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4040 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4046 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4050 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4051 type, TYPE, return an expression to test if EXP is in (or out of, depending
4052 on IN_P) the range. Return 0 if the test couldn't be created. */
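/* Example (added for illustration, not from the original source): a call
   such as build_range_check (boolean_type_node, X, 1, 2, 5) rewrites the
   test against the low bound and produces roughly
   "(unsigned) (X - 2) <= 3", matching the transformation described in the
   range-test overview above.  */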
4055 build_range_check (tree type
, tree exp
, int in_p
, tree low
, tree high
)
4057 tree etype
= TREE_TYPE (exp
);
4060 #ifdef HAVE_canonicalize_funcptr_for_compare
4061 /* Disable this optimization for function pointer expressions
4062 on targets that require function pointer canonicalization. */
4063 if (HAVE_canonicalize_funcptr_for_compare
4064 && TREE_CODE (etype
) == POINTER_TYPE
4065 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4071 value
= build_range_check (type
, exp
, 1, low
, high
);
4073 return invert_truthvalue (value
);
4078 if (low
== 0 && high
== 0)
4079 return build_int_cst (type
, 1);
4082 return fold_build2 (LE_EXPR
, type
, exp
,
4083 fold_convert (etype
, high
));
4086 return fold_build2 (GE_EXPR
, type
, exp
,
4087 fold_convert (etype
, low
));
4089 if (operand_equal_p (low
, high
, 0))
4090 return fold_build2 (EQ_EXPR
, type
, exp
,
4091 fold_convert (etype
, low
));
4093 if (integer_zerop (low
))
4095 if (! TYPE_UNSIGNED (etype
))
4097 etype
= lang_hooks
.types
.unsigned_type (etype
);
4098 high
= fold_convert (etype
, high
);
4099 exp
= fold_convert (etype
, exp
);
4101 return build_range_check (type
, exp
, 1, 0, high
);
4104 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4105 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4107 unsigned HOST_WIDE_INT lo
;
4111 prec
= TYPE_PRECISION (etype
);
4112 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4115 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4119 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4120 lo
= (unsigned HOST_WIDE_INT
) -1;
4123 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4125 if (TYPE_UNSIGNED (etype
))
4127 etype
= lang_hooks
.types
.signed_type (etype
);
4128 exp
= fold_convert (etype
, exp
);
4130 return fold_build2 (GT_EXPR
, type
, exp
,
4131 build_int_cst (etype
, 0));
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
4137 switch (TREE_CODE (etype
))
4140 /* There is no requirement that LOW be within the range of ETYPE
4141 if the latter is a subtype. It must, however, be within the base
4142 type of ETYPE. So be sure we do the subtraction in that type. */
4143 if (TREE_TYPE (etype
))
4144 etype
= TREE_TYPE (etype
);
4149 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4150 TYPE_UNSIGNED (etype
));
  /* If we don't have wrap-around arithmetic upfront, try to force it.  */
4158 if (TREE_CODE (etype
) == INTEGER_TYPE
4159 && !TYPE_UNSIGNED (etype
) && !flag_wrapv
)
4161 tree utype
, minv
, maxv
;
4163 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4164 for the type in question, as we rely on this here. */
4165 utype
= lang_hooks
.types
.unsigned_type (etype
);
4166 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
4167 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4168 integer_one_node
, 1);
4169 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
4171 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4178 high
= fold_convert (etype
, high
);
4179 low
= fold_convert (etype
, low
);
4180 exp
= fold_convert (etype
, exp
);
4182 value
= const_binop (MINUS_EXPR
, high
, low
, 0);
4184 if (value
!= 0 && !TREE_OVERFLOW (value
))
4185 return build_range_check (type
,
4186 fold_build2 (MINUS_EXPR
, etype
, exp
, low
),
4187 1, build_int_cst (etype
, 0), value
);
/* Return the predecessor of VAL in its type, handling the infinite case.  */

range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;

  return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;

  return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
4220 /* Given two ranges, see if we can merge them into one. Return 1 if we
4221 can, 0 if we can't. Set the output range into the specified parameters. */
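/* Example (added for illustration, not from the original source): the merge
   treats the two ranges as conditions that must hold simultaneously when
   both are "in" ranges; e.g. combining + [2, 10] with + [5, 20] yields
   + [5, 10], while disjoint "in" ranges collapse to the always-false
   range.  */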
4224 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4225 tree high0
, int in1_p
, tree low1
, tree high1
)
4233 int lowequal
= ((low0
== 0 && low1
== 0)
4234 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4235 low0
, 0, low1
, 0)));
4236 int highequal
= ((high0
== 0 && high1
== 0)
4237 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4238 high0
, 1, high1
, 1)));
4240 /* Make range 0 be the range that starts first, or ends last if they
4241 start at the same value. Swap them if it isn't. */
4242 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4245 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4246 high1
, 1, high0
, 1))))
4248 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4249 tem
= low0
, low0
= low1
, low1
= tem
;
4250 tem
= high0
, high0
= high1
, high1
= tem
;
4253 /* Now flag two cases, whether the ranges are disjoint or whether the
4254 second range is totally subsumed in the first. Note that the tests
4255 below are simplified by the ones above. */
4256 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4257 high0
, 1, low1
, 0));
4258 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4259 high1
, 1, high0
, 1));
4261 /* We now have four cases, depending on whether we are including or
4262 excluding the two ranges. */
4265 /* If they don't overlap, the result is false. If the second range
4266 is a subset it is the result. Otherwise, the range is from the start
4267 of the second to the end of the first. */
4269 in_p
= 0, low
= high
= 0;
4271 in_p
= 1, low
= low1
, high
= high1
;
4273 in_p
= 1, low
= low1
, high
= high0
;
4276 else if (in0_p
&& ! in1_p
)
4278 /* If they don't overlap, the result is the first range. If they are
4279 equal, the result is false. If the second range is a subset of the
4280 first, and the ranges begin at the same place, we go from just after
4281 the end of the second range to the end of the first. If the second
4282 range is not a subset of the first, or if it is a subset and both
4283 ranges end at the same place, the range starts at the start of the
4284 first range and ends just before the second range.
4285 Otherwise, we can't describe this as a single range. */
4287 in_p
= 1, low
= low0
, high
= high0
;
4288 else if (lowequal
&& highequal
)
4289 in_p
= 0, low
= high
= 0;
4290 else if (subset
&& lowequal
)
4292 low
= range_successor (high1
);
4296 else if (! subset
|| highequal
)
4299 high
= range_predecessor (low1
);
  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            return 0;
        }
    }
  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of
         the second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
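/* For example, merging the two included ranges [0, 9] and [5, 20] (the
   conjunction "x >= 0 && x <= 9 && x >= 5 && x <= 20") yields the single
   included range [5, 9], while merging included [0, 9] with excluded
   [5, 20] yields included [0, 4].  A hypothetical caller sketch, with the
   bound trees elided:

     int in_p;
     tree low, high;
     if (merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1))
       tem = build_range_check (type, exp, in_p, low, high);
*/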
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;
4436 /* If we have A op 0 ? A : -A, consider applying the following
4439 A == 0? A : -A same as -A
4440 A != 0? A : -A same as A
4441 A >= 0? A : -A same as abs (A)
4442 A > 0? A : -A same as abs (A)
4443 A <= 0? A : -A same as -abs (A)
4444 A < 0? A : -A same as -abs (A)
4446 None of these transformations work for modes with signed
4447 zeros. If A is +/-0, the first two transformations will
4448 change the sign of the result (from +0 to -0, or vice
4449 versa). The last four will fix the sign of the result,
4450 even though the original expressions could be positive or
4451 negative, depending on the sign of A.
4453 Note that all these transformations are correct if A is
4454 NaN, since the two alternatives (A and -A) are also NaNs. */
4455 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4456 ? real_zerop (arg01
)
4457 : integer_zerop (arg01
))
4458 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4459 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4460 /* In the case that A is of the form X-Y, '-A' (arg2) may
4461 have already been folded to Y-X, check for that. */
4462 || (TREE_CODE (arg1
) == MINUS_EXPR
4463 && TREE_CODE (arg2
) == MINUS_EXPR
4464 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4465 TREE_OPERAND (arg2
, 1), 0)
4466 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4467 TREE_OPERAND (arg2
, 0), 0))))
4472 tem
= fold_convert (arg1_type
, arg1
);
4473 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4476 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4479 if (flag_trapping_math
)
4484 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4485 arg1
= fold_convert (lang_hooks
.types
.signed_type
4486 (TREE_TYPE (arg1
)), arg1
);
4487 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4488 return pedantic_non_lvalue (fold_convert (type
, tem
));
4491 if (flag_trapping_math
)
4495 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4496 arg1
= fold_convert (lang_hooks
.types
.signed_type
4497 (TREE_TYPE (arg1
)), arg1
);
4498 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4499 return negate_expr (fold_convert (type
, tem
));
4501 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4505 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4506 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4507 both transformations are correct when A is NaN: A != 0
4508 is then true, and A == 0 is false. */
4510 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4512 if (comp_code
== NE_EXPR
)
4513 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4514 else if (comp_code
== EQ_EXPR
)
4515 return build_int_cst (type
, 0);
4518 /* Try some transformations of A op B ? A : B.
4520 A == B? A : B same as B
4521 A != B? A : B same as A
4522 A >= B? A : B same as max (A, B)
4523 A > B? A : B same as max (B, A)
4524 A <= B? A : B same as min (A, B)
4525 A < B? A : B same as min (B, A)
4527 As above, these transformations don't work in the presence
4528 of signed zeros. For example, if A and B are zeros of
4529 opposite sign, the first two transformations will change
4530 the sign of the result. In the last four, the original
4531 expressions give different results for (A=+0, B=-0) and
4532 (A=-0, B=+0), but the transformed expressions do not.
4534 The first two transformations are correct if either A or B
4535 is a NaN. In the first transformation, the condition will
4536 be false, and B will indeed be chosen. In the case of the
4537 second transformation, the condition A != B will be true,
4538 and A will be chosen.
4540 The conversions to max() and min() are not correct if B is
4541 a number and A is not. The conditions in the original
4542 expressions will be false, so all four give B. The min()
4543 and max() versions would give a NaN instead. */
4544 if (operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4545 /* Avoid these transformations if the COND_EXPR may be used
4546 as an lvalue in the C++ front-end. PR c++/19199. */
4548 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4549 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4550 || ! maybe_lvalue_p (arg1
)
4551 || ! maybe_lvalue_p (arg2
)))
4553 tree comp_op0
= arg00
;
4554 tree comp_op1
= arg01
;
4555 tree comp_type
= TREE_TYPE (comp_op0
);
4557 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4558 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4568 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4570 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4575 /* In C++ a ?: expression can be an lvalue, so put the
4576 operand which will be used if they are equal first
4577 so that we can convert this back to the
4578 corresponding COND_EXPR. */
4579 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4581 comp_op0
= fold_convert (comp_type
, comp_op0
);
4582 comp_op1
= fold_convert (comp_type
, comp_op1
);
4583 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4584 ? fold_build2 (MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4585 : fold_build2 (MIN_EXPR
, comp_type
, comp_op1
, comp_op0
);
4586 return pedantic_non_lvalue (fold_convert (type
, tem
));
4593 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4595 comp_op0
= fold_convert (comp_type
, comp_op0
);
4596 comp_op1
= fold_convert (comp_type
, comp_op1
);
4597 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4598 ? fold_build2 (MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4599 : fold_build2 (MAX_EXPR
, comp_type
, comp_op1
, comp_op0
);
4600 return pedantic_non_lvalue (fold_convert (type
, tem
));
4604 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4605 return pedantic_non_lvalue (fold_convert (type
, arg2
));
4608 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4609 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4612 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4617 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4618 we might still be able to simplify this. For example,
4619 if C1 is one less or one more than C2, this might have started
4620 out as a MIN or MAX and been transformed by this function.
4621 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4623 if (INTEGRAL_TYPE_P (type
)
4624 && TREE_CODE (arg01
) == INTEGER_CST
4625 && TREE_CODE (arg2
) == INTEGER_CST
)
4629 /* We can replace A with C1 in this case. */
4630 arg1
= fold_convert (type
, arg01
);
4631 return fold_build3 (COND_EXPR
, type
, arg0
, arg1
, arg2
);
4634 /* If C1 is C2 + 1, this is min(A, C2). */
4635 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4637 && operand_equal_p (arg01
,
4638 const_binop (PLUS_EXPR
, arg2
,
4639 integer_one_node
, 0),
4641 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4646 /* If C1 is C2 - 1, this is min(A, C2). */
4647 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4649 && operand_equal_p (arg01
,
4650 const_binop (MINUS_EXPR
, arg2
,
4651 integer_one_node
, 0),
4653 return pedantic_non_lvalue (fold_build2 (MIN_EXPR
,
4658 /* If C1 is C2 - 1, this is max(A, C2). */
4659 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4661 && operand_equal_p (arg01
,
4662 const_binop (MINUS_EXPR
, arg2
,
4663 integer_one_node
, 0),
4665 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
4670 /* If C1 is C2 + 1, this is max(A, C2). */
4671 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4673 && operand_equal_p (arg01
,
4674 const_binop (PLUS_EXPR
, arg2
,
4675 integer_one_node
, 0),
4677 return pedantic_non_lvalue (fold_build2 (MAX_EXPR
,
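/* For example, for a signed integer or floating-point A,
   "A > 0 ? A : -A" folds to ABS_EXPR <A> and "A <= 0 ? A : -A" folds to
   -ABS_EXPR <A>; likewise "a < b ? a : b" can become a MIN_EXPR and
   "a > b ? a : b" a MAX_EXPR, provided NaNs and signed zeros do not
   matter for the operand type, as explained in the comments above.  */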
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;
4709 /* If this is an OR operation, invert both sides; we will invert
4710 again at the end. */
4712 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4714 /* If both expressions are the same, if we can merge the ranges, and we
4715 can build the range test, return it or it inverted. If one of the
4716 ranges is always true or always false, consider it to be the same
4717 expression as the other. */
4718 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4719 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4721 && 0 != (tem
= (build_range_check (type
,
4723 : rhs
!= 0 ? rhs
: integer_zero_node
,
4725 return or_op
? invert_truthvalue (tem
) : tem
;
4727 /* On machines where the branch cost is expensive, if this is a
4728 short-circuited branch and the underlying object on both sides
4729 is the same, make a non-short-circuit operation. */
4730 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4731 && lhs
!= 0 && rhs
!= 0
4732 && (code
== TRUTH_ANDIF_EXPR
4733 || code
== TRUTH_ORIF_EXPR
)
4734 && operand_equal_p (lhs
, rhs
, 0))
4736 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4737 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4738 which cases we can't do this. */
4739 if (simple_operand_p (lhs
))
4740 return build2 (code
== TRUTH_ANDIF_EXPR
4741 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4744 else if (lang_hooks
.decls
.global_bindings_p () == 0
4745 && ! CONTAINS_PLACEHOLDER_P (lhs
))
4747 tree common
= save_expr (lhs
);
4749 if (0 != (lhs
= build_range_check (type
, common
,
4750 or_op
? ! in0_p
: in0_p
,
4752 && (0 != (rhs
= build_range_check (type
, common
,
4753 or_op
? ! in1_p
: in1_p
,
4755 return build2 (code
== TRUTH_ANDIF_EXPR
4756 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
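/* For example, "c >= '0' && c <= '9'" on an integral operand is merged
   into a single range test equivalent to
   "(unsigned) (c - '0') <= 9", and "x < 3 || x > 10" becomes the inverted
   form of the range check for [3, 10].  */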
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */
4842 enum tree_code wanted_code
;
4843 enum tree_code lcode
, rcode
;
4844 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
4845 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
4846 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
4847 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
4848 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
4849 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
4850 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
4851 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
4852 enum machine_mode lnmode
, rnmode
;
4853 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
4854 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
4855 tree l_const
, r_const
;
4856 tree lntype
, rntype
, result
;
4857 int first_bit
, end_bit
;
4859 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4860 enum tree_code orig_code
= code
;
4862 /* Start by getting the comparison codes. Fail if anything is volatile.
4863 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4864 it were surrounded with a NE_EXPR. */
4866 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
4869 lcode
= TREE_CODE (lhs
);
4870 rcode
= TREE_CODE (rhs
);
4872 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
4874 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
4875 build_int_cst (TREE_TYPE (lhs
), 0));
4879 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
4881 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
4882 build_int_cst (TREE_TYPE (rhs
), 0));
4886 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
4887 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
4890 ll_arg
= TREE_OPERAND (lhs
, 0);
4891 lr_arg
= TREE_OPERAND (lhs
, 1);
4892 rl_arg
= TREE_OPERAND (rhs
, 0);
4893 rr_arg
= TREE_OPERAND (rhs
, 1);
4895 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4896 if (simple_operand_p (ll_arg
)
4897 && simple_operand_p (lr_arg
))
4900 if (operand_equal_p (ll_arg
, rl_arg
, 0)
4901 && operand_equal_p (lr_arg
, rr_arg
, 0))
4903 result
= combine_comparisons (code
, lcode
, rcode
,
4904 truth_type
, ll_arg
, lr_arg
);
4908 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
4909 && operand_equal_p (lr_arg
, rl_arg
, 0))
4911 result
= combine_comparisons (code
, lcode
,
4912 swap_tree_comparison (rcode
),
4913 truth_type
, ll_arg
, lr_arg
);
4919 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
4920 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
4922 /* If the RHS can be evaluated unconditionally and its operands are
4923 simple, it wins to evaluate the RHS unconditionally on machines
4924 with expensive branches. In this case, this isn't a comparison
4925 that can be merged. Avoid doing this if the RHS is a floating-point
4926 comparison since those can trap. */
4928 if (BRANCH_COST
>= 2
4929 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
4930 && simple_operand_p (rl_arg
)
4931 && simple_operand_p (rr_arg
))
4933 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4934 if (code
== TRUTH_OR_EXPR
4935 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
4936 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
4937 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4938 return build2 (NE_EXPR
, truth_type
,
4939 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4941 build_int_cst (TREE_TYPE (ll_arg
), 0));
4943 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4944 if (code
== TRUTH_AND_EXPR
4945 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
4946 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
4947 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4948 return build2 (EQ_EXPR
, truth_type
,
4949 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4951 build_int_cst (TREE_TYPE (ll_arg
), 0));
4953 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
4955 if (code
!= orig_code
|| lhs
!= orig_lhs
|| rhs
!= orig_rhs
)
4956 return build2 (code
, truth_type
, lhs
, rhs
);
4961 /* See if the comparisons can be merged. Then get all the parameters for
4964 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
4965 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
4969 ll_inner
= decode_field_reference (ll_arg
,
4970 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
4971 &ll_unsignedp
, &volatilep
, &ll_mask
,
4973 lr_inner
= decode_field_reference (lr_arg
,
4974 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
4975 &lr_unsignedp
, &volatilep
, &lr_mask
,
4977 rl_inner
= decode_field_reference (rl_arg
,
4978 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
4979 &rl_unsignedp
, &volatilep
, &rl_mask
,
4981 rr_inner
= decode_field_reference (rr_arg
,
4982 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
4983 &rr_unsignedp
, &volatilep
, &rr_mask
,
4986 /* It must be true that the inner operation on the lhs of each
4987 comparison must be the same if we are to be able to do anything.
4988 Then see if we have constants. If not, the same must be true for
4990 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
4991 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
4994 if (TREE_CODE (lr_arg
) == INTEGER_CST
4995 && TREE_CODE (rr_arg
) == INTEGER_CST
)
4996 l_const
= lr_arg
, r_const
= rr_arg
;
4997 else if (lr_inner
== 0 || rr_inner
== 0
4998 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5001 l_const
= r_const
= 0;
5003 /* If either comparison code is not correct for our logical operation,
5004 fail. However, we can convert a one-bit comparison against zero into
5005 the opposite comparison against that bit being set in the field. */
5007 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5008 if (lcode
!= wanted_code
)
5010 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5012 /* Make the left operand unsigned, since we are only interested
5013 in the value of one bit. Otherwise we are doing the wrong
5022 /* This is analogous to the code for l_const above. */
5023 if (rcode
!= wanted_code
)
5025 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5034 /* After this point all optimizations will generate bit-field
5035 references, which we might not want. */
5036 if (! lang_hooks
.can_use_bit_fields_p ())
5039 /* See if we can find a mode that contains both fields being compared on
5040 the left. If we can't, fail. Otherwise, update all constants and masks
5041 to be relative to a field of that size. */
5042 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5043 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5044 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5045 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5047 if (lnmode
== VOIDmode
)
5050 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5051 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5052 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5053 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5055 if (BYTES_BIG_ENDIAN
)
5057 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5058 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5061 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, ll_mask
),
5062 size_int (xll_bitpos
), 0);
5063 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert (lntype
, rl_mask
),
5064 size_int (xrl_bitpos
), 0);
5068 l_const
= fold_convert (lntype
, l_const
);
5069 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5070 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
5071 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5072 fold_build1 (BIT_NOT_EXPR
,
5076 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5078 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5083 r_const
= fold_convert (lntype
, r_const
);
5084 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5085 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
5086 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5087 fold_build1 (BIT_NOT_EXPR
,
5091 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5093 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5097 /* If the right sides are not constant, do the same for it. Also,
5098 disallow this optimization if a size or signedness mismatch occurs
5099 between the left and right sides. */
5102 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5103 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5104 /* Make sure the two fields on the right
5105 correspond to the left without being swapped. */
5106 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5109 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5110 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5111 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
5112 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5114 if (rnmode
== VOIDmode
)
5117 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5118 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5119 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5120 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5122 if (BYTES_BIG_ENDIAN
)
5124 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5125 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5128 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, lr_mask
),
5129 size_int (xlr_bitpos
), 0);
5130 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert (rntype
, rr_mask
),
5131 size_int (xrr_bitpos
), 0);
5133 /* Make a mask that corresponds to both fields being compared.
5134 Do this for both items being compared. If the operands are the
5135 same size and the bits being compared are in the same position
5136 then we can do this by masking both and comparing the masked
5138 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5139 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
5140 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5142 lhs
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5143 ll_unsignedp
|| rl_unsignedp
);
5144 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5145 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5147 rhs
= make_bit_field_ref (lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5148 lr_unsignedp
|| rr_unsignedp
);
5149 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5150 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5152 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5155 /* There is still another way we can do something: If both pairs of
5156 fields being compared are adjacent, we may be able to make a wider
5157 field containing them both.
5159 Note that we still must mask the lhs/rhs expressions. Furthermore,
5160 the mask must be shifted to account for the shift done by
5161 make_bit_field_ref. */
5162 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5163 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5164 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5165 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5169 lhs
= make_bit_field_ref (ll_inner
, lntype
, ll_bitsize
+ rl_bitsize
,
5170 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5171 rhs
= make_bit_field_ref (lr_inner
, rntype
, lr_bitsize
+ rr_bitsize
,
5172 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5174 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5175 size_int (MIN (xll_bitpos
, xrl_bitpos
)), 0);
5176 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5177 size_int (MIN (xlr_bitpos
, xrr_bitpos
)), 0);
5179 /* Convert to the smaller type before masking out unwanted bits. */
5181 if (lntype
!= rntype
)
5183 if (lnbitsize
> rnbitsize
)
5185 lhs
= fold_convert (rntype
, lhs
);
5186 ll_mask
= fold_convert (rntype
, ll_mask
);
5189 else if (lnbitsize
< rnbitsize
)
5191 rhs
= fold_convert (lntype
, rhs
);
5192 lr_mask
= fold_convert (lntype
, lr_mask
);
5197 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5198 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5200 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5201 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5203 return build2 (wanted_code
, truth_type
, lhs
, rhs
);
5209 /* Handle the case of comparisons with constants. If there is something in
5210 common between the masks, those bits of the constants must be the same.
5211 If not, the condition is always false. Test for this to avoid generating
5212 incorrect code below. */
5213 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
5214 if (! integer_zerop (result
)
5215 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
5216 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
5218 if (wanted_code
== NE_EXPR
)
5220 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5221 return constant_boolean_node (true, truth_type
);
5225 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5226 return constant_boolean_node (false, truth_type
);
5230 /* Construct the expression we will return. First get the component
5231 reference we will make. Unless the mask is all ones the width of
5232 that field, perform the mask operation. Then compare with the
5234 result
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5235 ll_unsignedp
|| rl_unsignedp
);
5237 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
5238 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5239 result
= build2 (BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5241 return build2 (wanted_code
, truth_type
, result
,
5242 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
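/* For example, given adjacent bit-fields b and c in *a,
   "a->b == 2 && a->c == 4" can be rewritten as a single load of the word
   containing both fields, one mask, and one comparison against the
   combined constant; and on targets where branches are expensive,
   "a != 0 || b != 0" becomes "(a | b) != 0".  */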
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);
5266 /* If something does not permit us to optimize, return the original tree. */
5267 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5268 || TREE_CODE (comp_const
) != INTEGER_CST
5269 || TREE_CONSTANT_OVERFLOW (comp_const
)
5270 || TREE_CODE (minmax_const
) != INTEGER_CST
5271 || TREE_CONSTANT_OVERFLOW (minmax_const
))
5274 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5275 and GT_EXPR, doing the rest with recursive calls using logical
5279 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5281 tree tem
= optimize_minmax_comparison (invert_tree_comparison (code
, false),
5284 return invert_truthvalue (tem
);
5290 fold_build2 (TRUTH_ORIF_EXPR
, type
,
5291 optimize_minmax_comparison
5292 (EQ_EXPR
, type
, arg0
, comp_const
),
5293 optimize_minmax_comparison
5294 (GT_EXPR
, type
, arg0
, comp_const
));
5297 if (op_code
== MAX_EXPR
&& consts_equal
)
5298 /* MAX (X, 0) == 0 -> X <= 0 */
5299 return fold_build2 (LE_EXPR
, type
, inner
, comp_const
);
5301 else if (op_code
== MAX_EXPR
&& consts_lt
)
5302 /* MAX (X, 0) == 5 -> X == 5 */
5303 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5305 else if (op_code
== MAX_EXPR
)
5306 /* MAX (X, 0) == -1 -> false */
5307 return omit_one_operand (type
, integer_zero_node
, inner
);
5309 else if (consts_equal
)
5310 /* MIN (X, 0) == 0 -> X >= 0 */
5311 return fold_build2 (GE_EXPR
, type
, inner
, comp_const
);
5314 /* MIN (X, 0) == 5 -> false */
5315 return omit_one_operand (type
, integer_zero_node
, inner
);
5318 /* MIN (X, 0) == -1 -> X == -1 */
5319 return fold_build2 (EQ_EXPR
, type
, inner
, comp_const
);
5322 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5323 /* MAX (X, 0) > 0 -> X > 0
5324 MAX (X, 0) > 5 -> X > 5 */
5325 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
5327 else if (op_code
== MAX_EXPR
)
5328 /* MAX (X, 0) > -1 -> true */
5329 return omit_one_operand (type
, integer_one_node
, inner
);
5331 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5332 /* MIN (X, 0) > 0 -> false
5333 MIN (X, 0) > 5 -> false */
5334 return omit_one_operand (type
, integer_zero_node
, inner
);
5337 /* MIN (X, 0) > -1 -> X > -1 */
5338 return fold_build2 (GT_EXPR
, type
, inner
, comp_const
);
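/* For example, with T of the form MAX_EXPR <X, 0>:
   "MAX (X, 0) == 0" folds to "X <= 0", "MAX (X, 0) == 5" folds to
   "X == 5", and "MAX (X, 0) > -1" folds to constant true; the LT, LE and
   NE cases are handled by inverting the folded EQ/GT forms.  */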
5345 /* T is an integer expression that is being multiplied, divided, or taken a
5346 modulus (CODE says which and what kind of divide or modulus) by a
5347 constant C. See if we can eliminate that operation by folding it with
5348 other operations already in T. WIDE_TYPE, if non-null, is a type that
5349 should be used for the computation if wider than our type.
5351 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5352 (X * 2) + (Y * 4). We must, however, be assured that either the original
5353 expression would not overflow or that overflow is undefined for the type
5354 in the language in question.
5356 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5357 the machine has a multiply-accumulate insn or that this is part of an
5358 addressing calculation.
5360 If we return a non-null expression, it is an equivalent form of the
5361 original computation, but need not be in the original type. */
5364 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5366 /* To avoid exponential search depth, refuse to allow recursion past
5367 three levels. Beyond that (1) it's highly unlikely that we'll find
5368 something interesting and (2) we've probably processed it before
5369 when we built the inner expression. */
5378 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
);
5385 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
)
5387 tree type
= TREE_TYPE (t
);
5388 enum tree_code tcode
= TREE_CODE (t
);
5389 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5390 > GET_MODE_SIZE (TYPE_MODE (type
)))
5391 ? wide_type
: type
);
5393 int same_p
= tcode
== code
;
5394 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5396 /* Don't deal with constants of zero here; they confuse the code below. */
5397 if (integer_zerop (c
))
5400 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5401 op0
= TREE_OPERAND (t
, 0);
5403 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5404 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5406 /* Note that we need not handle conditional operations here since fold
5407 already handles those cases. So just do arithmetic here. */
5411 /* For a constant, we can always simplify if we are a multiply
5412 or (for divide and modulus) if it is a multiple of our constant. */
5413 if (code
== MULT_EXPR
5414 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
5415 return const_binop (code
, fold_convert (ctype
, t
),
5416 fold_convert (ctype
, c
), 0);
5419 case CONVERT_EXPR
: case NON_LVALUE_EXPR
: case NOP_EXPR
:
5420 /* If op0 is an expression ... */
5421 if ((COMPARISON_CLASS_P (op0
)
5422 || UNARY_CLASS_P (op0
)
5423 || BINARY_CLASS_P (op0
)
5424 || EXPRESSION_CLASS_P (op0
))
5425 /* ... and is unsigned, and its type is smaller than ctype,
5426 then we cannot pass through as widening. */
5427 && ((TYPE_UNSIGNED (TREE_TYPE (op0
))
5428 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5429 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5430 && (GET_MODE_SIZE (TYPE_MODE (ctype
))
5431 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
)))))
5432 /* ... or this is a truncation (t is narrower than op0),
5433 then we cannot pass through this narrowing. */
5434 || (GET_MODE_SIZE (TYPE_MODE (type
))
5435 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0
))))
5436 /* ... or signedness changes for division or modulus,
5437 then we cannot pass through this conversion. */
5438 || (code
!= MULT_EXPR
5439 && (TYPE_UNSIGNED (ctype
)
5440 != TYPE_UNSIGNED (TREE_TYPE (op0
))))))
5443 /* Pass the constant down and see if we can make a simplification. If
5444 we can, replace this expression with the inner simplification for
5445 possible later conversion to our or some other type. */
5446 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5447 && TREE_CODE (t2
) == INTEGER_CST
5448 && ! TREE_CONSTANT_OVERFLOW (t2
)
5449 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5451 ? ctype
: NULL_TREE
))))
5456 /* If widening the type changes it from signed to unsigned, then we
5457 must avoid building ABS_EXPR itself as unsigned. */
5458 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5460 tree cstype
= (*lang_hooks
.types
.signed_type
) (ctype
);
5461 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
)) != 0)
5463 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5464 return fold_convert (ctype
, t1
);
5470 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5471 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5474 case MIN_EXPR
: case MAX_EXPR
:
5475 /* If widening the type changes the signedness, then we can't perform
5476 this optimization as that changes the result. */
5477 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5480 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5481 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0
5482 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5484 if (tree_int_cst_sgn (c
) < 0)
5485 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5487 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5488 fold_convert (ctype
, t2
));
5492 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5493 /* If the second operand is constant, this is a multiplication
5494 or floor division, by a power of two, so we can treat it that
5495 way unless the multiplier or divisor overflows. Signed
5496 left-shift overflow is implementation-defined rather than
5497 undefined in C90, so do not convert signed left shift into
5499 if (TREE_CODE (op1
) == INTEGER_CST
5500 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5501 /* const_binop may not detect overflow correctly,
5502 so check for it explicitly here. */
5503 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5504 && TREE_INT_CST_HIGH (op1
) == 0
5505 && 0 != (t1
= fold_convert (ctype
,
5506 const_binop (LSHIFT_EXPR
,
5509 && ! TREE_OVERFLOW (t1
))
5510 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5511 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5512 ctype
, fold_convert (ctype
, op0
), t1
),
5513 c
, code
, wide_type
);
5516 case PLUS_EXPR
: case MINUS_EXPR
:
5517 /* See if we can eliminate the operation on both sides. If we can, we
5518 can return a new PLUS or MINUS. If we can't, the only remaining
5519 cases where we can do anything are if the second operand is a
5521 t1
= extract_muldiv (op0
, c
, code
, wide_type
);
5522 t2
= extract_muldiv (op1
, c
, code
, wide_type
);
5523 if (t1
!= 0 && t2
!= 0
5524 && (code
== MULT_EXPR
5525 /* If not multiplication, we can only do this if both operands
5526 are divisible by c. */
5527 || (multiple_of_p (ctype
, op0
, c
)
5528 && multiple_of_p (ctype
, op1
, c
))))
5529 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5530 fold_convert (ctype
, t2
));
5532 /* If this was a subtraction, negate OP1 and set it to be an addition.
5533 This simplifies the logic below. */
5534 if (tcode
== MINUS_EXPR
)
5535 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5537 if (TREE_CODE (op1
) != INTEGER_CST
)
5540 /* If either OP1 or C are negative, this optimization is not safe for
5541 some of the division and remainder types while for others we need
5542 to change the code. */
5543 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5545 if (code
== CEIL_DIV_EXPR
)
5546 code
= FLOOR_DIV_EXPR
;
5547 else if (code
== FLOOR_DIV_EXPR
)
5548 code
= CEIL_DIV_EXPR
;
5549 else if (code
!= MULT_EXPR
5550 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5554 /* If it's a multiply or a division/modulus operation of a multiple
5555 of our constant, do the operation and verify it doesn't overflow. */
5556 if (code
== MULT_EXPR
5557 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5559 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5560 fold_convert (ctype
, c
), 0);
5561 /* We allow the constant to overflow with wrapping semantics. */
5563 || (TREE_OVERFLOW (op1
) && ! flag_wrapv
))
      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
5572 if (TYPE_UNSIGNED (ctype
)
5573 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5577 /* If we were able to eliminate our operation from the first side,
5578 apply our operation to the second side and reform the PLUS. */
5579 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5580 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5582 /* The last case is if we are a multiply. In that case, we can
5583 apply the distributive law to commute the multiply and addition
5584 if the multiplication of the constants doesn't overflow. */
5585 if (code
== MULT_EXPR
)
5586 return fold_build2 (tcode
, ctype
,
5587 fold_build2 (code
, ctype
,
5588 fold_convert (ctype
, op0
),
5589 fold_convert (ctype
, c
)),
5595 /* We have a special case here if we are doing something like
5596 (C * 8) % 4 since we know that's zero. */
5597 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5598 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5599 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5600 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5601 return omit_one_operand (type
, integer_zero_node
, op0
);
5603 /* ... fall through ... */
5605 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5606 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5607 /* If we can extract our operation from the LHS, do so and return a
5608 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5609 do something only if the second operand is a constant. */
5611 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
)) != 0)
5612 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5613 fold_convert (ctype
, op1
));
5614 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5615 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
)) != 0)
5616 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5617 fold_convert (ctype
, t1
));
5618 else if (TREE_CODE (op1
) != INTEGER_CST
)
5621 /* If these are the same operation types, we can associate them
5622 assuming no overflow. */
5624 && 0 != (t1
= const_binop (MULT_EXPR
, fold_convert (ctype
, op1
),
5625 fold_convert (ctype
, c
), 0))
5626 && ! TREE_OVERFLOW (t1
))
5627 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
), t1
);
5629 /* If these operations "cancel" each other, we have the main
5630 optimizations of this pass, which occur when either constant is a
5631 multiple of the other, in which case we replace this with either an
5632 operation or CODE or TCODE.
5634 If we have an unsigned type that is not a sizetype, we cannot do
5635 this since it will change the result if the original computation
5637 if ((! TYPE_UNSIGNED (ctype
)
5638 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5640 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5641 || (tcode
== MULT_EXPR
5642 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5643 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
)))
5645 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5646 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5647 fold_convert (ctype
,
5648 const_binop (TRUNC_DIV_EXPR
,
5650 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
, 0)))
5651 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5652 fold_convert (ctype
,
5653 const_binop (TRUNC_DIV_EXPR
,
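/* A hypothetical caller sketch: when folding "(X * 8 + Y * 16) / 4", fold
   passes the PLUS_EXPR tree and the constant 4 with code TRUNC_DIV_EXPR;
   extract_muldiv returns a tree for "X * 2 + Y * 4", possibly in
   WIDE_TYPE, and the caller converts it back to the result type:

     tree tem = extract_muldiv (op0, c, TRUNC_DIV_EXPR, NULL_TREE);
     if (tem != 0)
       return fold_convert (type, tem);
*/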
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
5690 /* One canonical form is a PLUS_EXPR with the first
5691 argument being an ADDR_EXPR with a possible NOP_EXPR
5693 if (TREE_CODE (expr
) == PLUS_EXPR
)
5695 tree op0
= TREE_OPERAND (expr
, 0);
5696 tree inner_base
, dummy1
;
5697 /* Strip NOP_EXPRs here because the C frontends and/or
5698 folders present us (int *)&x.a + 4B possibly. */
5700 if (extract_array_ref (op0
, &inner_base
, &dummy1
))
5703 if (dummy1
== NULL_TREE
)
5704 *offset
= TREE_OPERAND (expr
, 1);
5706 *offset
= fold_build2 (PLUS_EXPR
, TREE_TYPE (expr
),
5707 dummy1
, TREE_OPERAND (expr
, 1));
5711 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5712 which we transform into an ADDR_EXPR with appropriate
5713 offset. For other arguments to the ADDR_EXPR we assume
5714 zero offset and as such do not care about the ADDR_EXPR
5715 type and strip possible nops from it. */
5716 else if (TREE_CODE (expr
) == ADDR_EXPR
)
5718 tree op0
= TREE_OPERAND (expr
, 0);
5719 if (TREE_CODE (op0
) == ARRAY_REF
)
5721 tree idx
= TREE_OPERAND (op0
, 1);
5722 *base
= TREE_OPERAND (op0
, 0);
5723 *offset
= fold_build2 (MULT_EXPR
, TREE_TYPE (idx
), idx
,
5724 array_ref_element_size (op0
));
5728 /* Handle array-to-pointer decay as &a. */
5729 if (TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
5730 *base
= TREE_OPERAND (expr
, 0);
5733 *offset
= NULL_TREE
;
5737 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5738 else if (SSA_VAR_P (expr
)
5739 && TREE_CODE (TREE_TYPE (expr
)) == POINTER_TYPE
)
5742 *offset
= NULL_TREE
;
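/* For example, for EXPR of the form "&a[i]", *BASE identifies the array
   "a" and *OFFSET becomes "i * sizeof (a[0])" (computed via
   array_ref_element_size); for a plain pointer variable "p", *BASE is the
   pointer itself and *OFFSET is NULL_TREE, meaning an offset of zero
   bytes.  */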
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
5770 /* This transformation is only worthwhile if we don't have to wrap
5771 arg in a SAVE_EXPR, and the operation can be simplified on at least
5772 one of the branches once its pushed inside the COND_EXPR. */
5773 if (!TREE_CONSTANT (arg
))
5776 if (TREE_CODE (cond
) == COND_EXPR
)
5778 test
= TREE_OPERAND (cond
, 0);
5779 true_value
= TREE_OPERAND (cond
, 1);
5780 false_value
= TREE_OPERAND (cond
, 2);
5781 /* If this operand throws an expression, then it does not make
5782 sense to try to perform a logical or arithmetic operation
5784 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5786 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
5791 tree testtype
= TREE_TYPE (cond
);
5793 true_value
= constant_boolean_node (true, testtype
);
5794 false_value
= constant_boolean_node (false, testtype
);
5797 arg
= fold_convert (arg_type
, arg
);
5800 true_value
= fold_convert (cond_type
, true_value
);
5802 lhs
= fold_build2 (code
, type
, true_value
, arg
);
5804 lhs
= fold_build2 (code
, type
, arg
, true_value
);
5808 false_value
= fold_convert (cond_type
, false_value
);
5810 rhs
= fold_build2 (code
, type
, false_value
, arg
);
5812 rhs
= fold_build2 (code
, type
, arg
, false_value
);
5815 test
= fold_build3 (COND_EXPR
, type
, test
, lhs
, rhs
);
5816 return fold_convert (type
, test
);
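/* For example, with CODE == PLUS_EXPR, ARG == 5 and COND == (x < y),
   the result is "(x < y) ? 6 : 5": the constant 5 is pushed into both
   arms of the conditional, where each arm then folds to a constant.  */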
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
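/* For example, "x + 0.0" may be folded to "x" only when the mode's signed
   zeros can be ignored (otherwise -0.0 + 0.0 is +0.0, not -0.0), whereas
   "x - 0.0" is foldable even with signed zeros as long as sign-dependent
   rounding (rounding towards -infinity) is not in effect and
   -fsignaling-nans is not enabled.  */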
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
5881 if (REAL_VALUE_NEGATIVE (c
))
5883 /* sqrt(x) < y is always false, if y is negative. */
5884 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
5885 return omit_one_operand (type
, integer_zero_node
, arg
);
5887 /* sqrt(x) > y is always true, if y is negative and we
5888 don't care about NaNs, i.e. negative values of x. */
5889 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
5890 return omit_one_operand (type
, integer_one_node
, arg
);
5892 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5893 return fold_build2 (GE_EXPR
, type
, arg
,
5894 build_real (TREE_TYPE (arg
), dconst0
));
5896 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
5900 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5901 real_convert (&c2
, mode
, &c2
);
5903 if (REAL_VALUE_ISINF (c2
))
5905 /* sqrt(x) > y is x == +Inf, when y is very large. */
5906 if (HONOR_INFINITIES (mode
))
5907 return fold_build2 (EQ_EXPR
, type
, arg
,
5908 build_real (TREE_TYPE (arg
), c2
));
5910 /* sqrt(x) > y is always false, when y is very large
5911 and we don't care about infinities. */
5912 return omit_one_operand (type
, integer_zero_node
, arg
);
5915 /* sqrt(x) > c is the same as x > c*c. */
5916 return fold_build2 (code
, type
, arg
,
5917 build_real (TREE_TYPE (arg
), c2
));
5919 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
5923 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
5924 real_convert (&c2
, mode
, &c2
);
5926 if (REAL_VALUE_ISINF (c2
))
5928 /* sqrt(x) < y is always true, when y is a very large
5929 value and we don't care about NaNs or Infinities. */
5930 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
5931 return omit_one_operand (type
, integer_one_node
, arg
);
5933 /* sqrt(x) < y is x != +Inf when y is very large and we
5934 don't care about NaNs. */
5935 if (! HONOR_NANS (mode
))
5936 return fold_build2 (NE_EXPR
, type
, arg
,
5937 build_real (TREE_TYPE (arg
), c2
));
5939 /* sqrt(x) < y is x >= 0 when y is very large and we
5940 don't care about Infinities. */
5941 if (! HONOR_INFINITIES (mode
))
5942 return fold_build2 (GE_EXPR
, type
, arg
,
5943 build_real (TREE_TYPE (arg
), dconst0
));
5945 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5946 if (lang_hooks
.decls
.global_bindings_p () != 0
5947 || CONTAINS_PLACEHOLDER_P (arg
))
5950 arg
= save_expr (arg
);
5951 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
5952 fold_build2 (GE_EXPR
, type
, arg
,
5953 build_real (TREE_TYPE (arg
),
5955 fold_build2 (NE_EXPR
, type
, arg
,
5956 build_real (TREE_TYPE (arg
),
5960 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5961 if (! HONOR_NANS (mode
))
5962 return fold_build2 (code
, type
, arg
,
5963 build_real (TREE_TYPE (arg
), c2
));
5965 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5966 if (lang_hooks
.decls
.global_bindings_p () == 0
5967 && ! CONTAINS_PLACEHOLDER_P (arg
))
5969 arg
= save_expr (arg
);
5970 return fold_build2 (TRUTH_ANDIF_EXPR
, type
,
5971 fold_build2 (GE_EXPR
, type
, arg
,
5972 build_real (TREE_TYPE (arg
),
5974 fold_build2 (code
, type
, arg
,
5975 build_real (TREE_TYPE (arg
),
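/* For example, "sqrt (x) > 2.0" folds to "x > 4.0", and "sqrt (x) < -1.0"
   folds to constant false: for EQ, LT or LE against a negative constant
   the comparison can never be true, since sqrt never returns a negative
   value.  */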
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));
6004 /* For negative infinity swap the sense of the comparison. */
6005 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6007 code
= swap_tree_comparison (code
);
      /* x > +Inf is always false, if we ignore sNaNs.  */
6013 if (HONOR_SNANS (mode
))
6015 return omit_one_operand (type
, integer_zero_node
, arg0
);
      /* x <= +Inf is always true, if we don't care about NaNs.  */
6019 if (! HONOR_NANS (mode
))
6020 return omit_one_operand (type
, integer_one_node
, arg0
);
6022 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6023 if (lang_hooks
.decls
.global_bindings_p () == 0
6024 && ! CONTAINS_PLACEHOLDER_P (arg0
))
6026 arg0
= save_expr (arg0
);
6027 return fold_build2 (EQ_EXPR
, type
, arg0
, arg0
);
6033 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6034 real_maxval (&max
, neg
, mode
);
6035 return fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
6036 arg0
, build_real (TREE_TYPE (arg0
), max
));
6039 /* x < +Inf is always equal to x <= DBL_MAX. */
6040 real_maxval (&max
, neg
, mode
);
6041 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
6042 arg0
, build_real (TREE_TYPE (arg0
), max
));
6045 /* x != +Inf is always equal to !(x > DBL_MAX). */
6046 real_maxval (&max
, neg
, mode
);
6047 if (! HONOR_NANS (mode
))
6048 return fold_build2 (neg
? GE_EXPR
: LE_EXPR
, type
,
6049 arg0
, build_real (TREE_TYPE (arg0
), max
));
6051 /* The transformation below creates non-gimple code and thus is
6052 not appropriate if we are in gimple form. */
6056 temp
= fold_build2 (neg
? LT_EXPR
: GT_EXPR
, type
,
6057 arg0
, build_real (TREE_TYPE (arg0
), max
));
6058 return fold_build1 (TRUTH_NOT_EXPR
, type
, temp
);
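/* For example, for a double operand x, "x < +Inf" folds to
   "x <= DBL_MAX", "x >= +Inf" folds to "x > DBL_MAX", and "x <= +Inf"
   folds to "x == x" (i.e. !isnan (x)) when NaNs must be honored.  */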
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool neg_overflow;
  int overflow;
6089 /* We have to do this the hard way to detect unsigned overflow.
6090 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6091 overflow
= mul_double (TREE_INT_CST_LOW (arg01
),
6092 TREE_INT_CST_HIGH (arg01
),
6093 TREE_INT_CST_LOW (arg1
),
6094 TREE_INT_CST_HIGH (arg1
), &lpart
, &hpart
);
6095 prod
= build_int_cst_wide (TREE_TYPE (arg00
), lpart
, hpart
);
6096 prod
= force_fit_type (prod
, -1, overflow
, false);
6097 neg_overflow
= false;
6099 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)))
6101 tmp
= int_const_binop (MINUS_EXPR
, arg01
, integer_one_node
, 0);
6104 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6105 overflow
= add_double (TREE_INT_CST_LOW (prod
),
6106 TREE_INT_CST_HIGH (prod
),
6107 TREE_INT_CST_LOW (tmp
),
6108 TREE_INT_CST_HIGH (tmp
),
6110 hi
= build_int_cst_wide (TREE_TYPE (arg00
), lpart
, hpart
);
6111 hi
= force_fit_type (hi
, -1, overflow
| TREE_OVERFLOW (prod
),
6112 TREE_CONSTANT_OVERFLOW (prod
));
6114 else if (tree_int_cst_sgn (arg01
) >= 0)
6116 tmp
= int_const_binop (MINUS_EXPR
, arg01
, integer_one_node
, 0);
6117 switch (tree_int_cst_sgn (arg1
))
6120 neg_overflow
= true;
6121 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6126 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6131 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6141 /* A negative divisor reverses the relational operators. */
6142 code
= swap_tree_comparison (code
);
6144 tmp
= int_const_binop (PLUS_EXPR
, arg01
, integer_one_node
, 0);
6145 switch (tree_int_cst_sgn (arg1
))
6148 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
, 0);
6153 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6158 neg_overflow
= true;
6159 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
, 0);
6171 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6172 return omit_one_operand (type
, integer_zero_node
, arg00
);
6173 if (TREE_OVERFLOW (hi
))
6174 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
6175 if (TREE_OVERFLOW (lo
))
6176 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6177 return build_range_check (type
, arg00
, 1, lo
, hi
);
6180 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6181 return omit_one_operand (type
, integer_one_node
, arg00
);
6182 if (TREE_OVERFLOW (hi
))
6183 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6184 if (TREE_OVERFLOW (lo
))
6185 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6186 return build_range_check (type
, arg00
, 0, lo
, hi
);
6189 if (TREE_OVERFLOW (lo
))
6191 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6192 return omit_one_operand (type
, tmp
, arg00
);
6194 return fold_build2 (LT_EXPR
, type
, arg00
, lo
);
6197 if (TREE_OVERFLOW (hi
))
6199 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6200 return omit_one_operand (type
, tmp
, arg00
);
6202 return fold_build2 (LE_EXPR
, type
, arg00
, hi
);
6205 if (TREE_OVERFLOW (hi
))
6207 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6208 return omit_one_operand (type
, tmp
, arg00
);
6210 return fold_build2 (GT_EXPR
, type
, arg00
, hi
);
6213 if (TREE_OVERFLOW (lo
))
6215 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6216 return omit_one_operand (type
, tmp
, arg00
);
6218 return fold_build2 (GE_EXPR
, type
, arg00
, lo
);
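
/* For illustration: with unsigned int X the comparison "x / 4 == 3"
   computes prod = 12 and hi = 15 above, so it folds into the range
   check "12 <= x && x <= 15" (build_range_check typically emits this
   as "x - 12 <= 3" in the unsigned type).  */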
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
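
/* For illustration, assuming 32-bit int: "(x & 0x80000000) != 0" for an
   unsigned int X is folded into the sign test "(int) x < 0", and the
   "== 0" form into "(int) x >= 0".  */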
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
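
/* For illustration: "(x & 8) != 0" becomes "((x >> 3) & 1)" (converted
   to the result type), and "(x & 8) == 0" becomes "(((x >> 3) ^ 1) & 1)".  */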
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
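
/* For illustration: for "5 < x" this predicate returns true because the
   integer constant is the first operand, so fold_comparison rebuilds
   the comparison as "x > 5".  */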
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
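
/* For illustration: with a short variable S, "(int) s == 12" is folded
   back into a comparison done in the narrower type, "s == (short) 12".  */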
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
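
/* For illustration: with an int variable I, "(unsigned int) i == 5u" is
   folded back into "i == 5", since only the signedness of the operand's
   type changes.  */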
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
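
/* For illustration: for "int a[N];" on a target where sizeof (int) == 4,
   the address expression "&a[i] + j * 4" is rewritten as "&a[i + j]".  */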
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same = NULL_TREE;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = alt0;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
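
/* For illustration: "x * 4 + y * 4" folds into "(x + y) * 4", and
   "x * 3 + x" (using the implicit factor 1) folds into "x * 4".  */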
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
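
/* For illustration: assuming a 32-bit INTEGER_CST with value 0x01020304,
   the bytes written are 04 03 02 01 on a little-endian target and
   01 02 03 04 on a big-endian one.  */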
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (FLOAT_WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

static int
native_encode_expr (tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned int HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return force_fit_type (build_int_cst_wide (type, lo, hi),
                         0, false, false);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (FLOAT_WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
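
/* For illustration: on a target with 32-bit int and IEEE single-precision
   float, VIEW_CONVERT_EXPR<int>(1.0f) folds via the encode/interpret pair
   above to the integer constant 0x3f800000 (1065353216).  */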
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.
             - the final type is a pointer type and the initial type not
             - the initial type is a pointer to an array and the final type
               not.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && final_ptr == inside_ptr
              && ! (inside_ptr
                    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
                    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, build_fold_addr_expr (base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
                                        TREE_INT_CST_HIGH (and1));
              tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
                                    TREE_CONSTANT_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
        return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
          return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_REALPART (arg0));
          tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, tem);
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, negate_expr (tem));
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
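
/* Examples of the unary foldings above: "~~x" folds back to "x"
   (BIT_NOT_EXPR of BIT_NOT_EXPR), "~-x" folds to "x - 1" for integral
   types, and ABS_EXPR of an operand already known to be non-negative
   folds to the operand itself.  */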
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
7739 /* Subroutine of fold_binary. This routine performs all of the
7740 transformations that are common to the equality/inequality
7741 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7742 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7743 fold_binary should call fold_binary. Fold a comparison with
7744 tree code CODE and type TYPE with operands OP0 and OP1. Return
7745 the folded comparison or NULL_TREE. */
7748 fold_comparison (enum tree_code code
, tree type
, tree op0
, tree op1
)
7750 tree arg0
, arg1
, tem
;
7755 STRIP_SIGN_NOPS (arg0
);
7756 STRIP_SIGN_NOPS (arg1
);
7758 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
7759 if (tem
!= NULL_TREE
)
7762 /* If one arg is a real or integer constant, put it last. */
7763 if (tree_swap_operands_p (arg0
, arg1
, true))
7764 return fold_build2 (swap_tree_comparison (code
), type
, op1
, op0
);
7766 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7767 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
7768 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7769 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
7770 && !TYPE_UNSIGNED (TREE_TYPE (arg1
))
7771 && !(flag_wrapv
|| flag_trapv
))
7772 && (TREE_CODE (arg1
) == INTEGER_CST
7773 && !TREE_OVERFLOW (arg1
)))
7775 tree const1
= TREE_OPERAND (arg0
, 1);
7777 tree variable
= TREE_OPERAND (arg0
, 0);
7780 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
7782 lhs
= fold_build2 (lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
7783 TREE_TYPE (arg1
), const2
, const1
);
7784 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
7785 && (TREE_CODE (lhs
) != INTEGER_CST
7786 || !TREE_OVERFLOW (lhs
)))
7787 return fold_build2 (code
, type
, variable
, lhs
);
7790 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7791 same object, then we can fold this to a comparison of the two offsets in
7792 signed size type. This is possible because pointer arithmetic is
7793 restricted to retain within an object and overflow on pointer differences
7794 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
7795 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
7796 && !flag_wrapv
&& !flag_trapv
)
7798 tree base0
, offset0
, base1
, offset1
;
7800 if (extract_array_ref (arg0
, &base0
, &offset0
)
7801 && extract_array_ref (arg1
, &base1
, &offset1
)
7802 && operand_equal_p (base0
, base1
, 0))
7804 tree signed_size_type_node
;
7805 signed_size_type_node
= signed_type_for (size_type_node
);
7807 /* By converting to signed size type we cover middle-end pointer
7808 arithmetic which operates on unsigned pointer types of size
7809 type size and ARRAY_REF offsets which are properly sign or
7810 zero extended from their type in case it is narrower than
7812 if (offset0
== NULL_TREE
)
7813 offset0
= build_int_cst (signed_size_type_node
, 0);
7815 offset0
= fold_convert (signed_size_type_node
, offset0
);
7816 if (offset1
== NULL_TREE
)
7817 offset1
= build_int_cst (signed_size_type_node
, 0);
7819 offset1
= fold_convert (signed_size_type_node
, offset1
);
7821 return fold_build2 (code
, type
, offset0
, offset1
);
7825 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
7827 tree targ0
= strip_float_extensions (arg0
);
7828 tree targ1
= strip_float_extensions (arg1
);
7829 tree newtype
= TREE_TYPE (targ0
);
7831 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
7832 newtype
= TREE_TYPE (targ1
);
7834 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7835 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
7836 return fold_build2 (code
, type
, fold_convert (newtype
, targ0
),
7837 fold_convert (newtype
, targ1
));
7839 /* (-a) CMP (-b) -> b CMP a */
7840 if (TREE_CODE (arg0
) == NEGATE_EXPR
7841 && TREE_CODE (arg1
) == NEGATE_EXPR
)
7842 return fold_build2 (code
, type
, TREE_OPERAND (arg1
, 0),
7843 TREE_OPERAND (arg0
, 0));
7845 if (TREE_CODE (arg1
) == REAL_CST
)
7847 REAL_VALUE_TYPE cst
;
7848 cst
= TREE_REAL_CST (arg1
);
7850 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7851 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
7852 return fold_build2 (swap_tree_comparison (code
), type
,
7853 TREE_OPERAND (arg0
, 0),
7854 build_real (TREE_TYPE (arg1
),
7855 REAL_VALUE_NEGATE (cst
)));
7857 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7858 /* a CMP (-0) -> a CMP 0 */
7859 if (REAL_VALUE_MINUS_ZERO (cst
))
7860 return fold_build2 (code
, type
, arg0
,
7861 build_real (TREE_TYPE (arg1
), dconst0
));
7863 /* x != NaN is always true, other ops are always false. */
7864 if (REAL_VALUE_ISNAN (cst
)
7865 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
7867 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
7868 return omit_one_operand (type
, tem
, arg0
);
7871 /* Fold comparisons against infinity. */
7872 if (REAL_VALUE_ISINF (cst
))
7874 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
7875 if (tem
!= NULL_TREE
)
7880 /* If this is a comparison of a real constant with a PLUS_EXPR
7881 or a MINUS_EXPR of a real constant, we can convert it into a
7882 comparison with a revised real constant as long as no overflow
7883 occurs when unsafe_math_optimizations are enabled. */
7884 if (flag_unsafe_math_optimizations
7885 && TREE_CODE (arg1
) == REAL_CST
7886 && (TREE_CODE (arg0
) == PLUS_EXPR
7887 || TREE_CODE (arg0
) == MINUS_EXPR
)
7888 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7889 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
7890 ? MINUS_EXPR
: PLUS_EXPR
,
7891 arg1
, TREE_OPERAND (arg0
, 1), 0))
7892 && ! TREE_CONSTANT_OVERFLOW (tem
))
7893 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
);
7895 /* Likewise, we can simplify a comparison of a real constant with
7896 a MINUS_EXPR whose first operand is also a real constant, i.e.
7897 (c1 - x) < c2 becomes x > c1-c2. */
7898 if (flag_unsafe_math_optimizations
7899 && TREE_CODE (arg1
) == REAL_CST
7900 && TREE_CODE (arg0
) == MINUS_EXPR
7901 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
7902 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
7904 && ! TREE_CONSTANT_OVERFLOW (tem
))
7905 return fold_build2 (swap_tree_comparison (code
), type
,
7906 TREE_OPERAND (arg0
, 1), tem
);
7908 /* Fold comparisons against built-in math functions. */
7909 if (TREE_CODE (arg1
) == REAL_CST
7910 && flag_unsafe_math_optimizations
7911 && ! flag_errno_math
)
7913 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
7915 if (fcode
!= END_BUILTINS
)
7917 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
7918 if (tem
!= NULL_TREE
)
7924 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7925 if (TREE_CONSTANT (arg1
)
7926 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
7927 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
7928 /* This optimization is invalid for ordered comparisons
7929 if CONST+INCR overflows or if foo+incr might overflow.
7930 This optimization is invalid for floating point due to rounding.
7931 For pointer types we assume overflow doesn't happen. */
7932 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
7933 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
7934 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
7936 tree varop
, newconst
;
7938 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
7940 newconst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
7941 arg1
, TREE_OPERAND (arg0
, 1));
7942 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
7943 TREE_OPERAND (arg0
, 0),
7944 TREE_OPERAND (arg0
, 1));
7948 newconst
= fold_build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
7949 arg1
, TREE_OPERAND (arg0
, 1));
7950 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
7951 TREE_OPERAND (arg0
, 0),
7952 TREE_OPERAND (arg0
, 1));
7956 /* If VAROP is a reference to a bitfield, we must mask
7957 the constant by the width of the field. */
7958 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
7959 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
7960 && host_integerp (DECL_SIZE (TREE_OPERAND
7961 (TREE_OPERAND (varop
, 0), 1)), 1))
7963 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
7964 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
7965 tree folded_compare
, shift
;
7967 /* First check whether the comparison would come out
7968 always the same. If we don't do that we would
7969 change the meaning with the masking. */
7970 folded_compare
= fold_build2 (code
, type
,
7971 TREE_OPERAND (varop
, 0), arg1
);
7972 if (TREE_CODE (folded_compare
) == INTEGER_CST
)
7973 return omit_one_operand (type
, folded_compare
, varop
);
7975 shift
= build_int_cst (NULL_TREE
,
7976 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
7977 shift
= fold_convert (TREE_TYPE (varop
), shift
);
7978 newconst
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
7980 newconst
= fold_build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
7984 return fold_build2 (code
, type
, varop
, newconst
);
7987 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
7988 && (TREE_CODE (arg0
) == NOP_EXPR
7989 || TREE_CODE (arg0
) == CONVERT_EXPR
))
7991 /* If we are widening one operand of an integer comparison,
7992 see if the other operand is similarly being widened. Perhaps we
7993 can do the comparison in the narrower type. */
7994 tem
= fold_widened_comparison (code
, type
, arg0
, arg1
);
7998 /* Or if we are changing signedness. */
7999 tem
= fold_sign_changed_comparison (code
, type
, arg0
, arg1
);
8004 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8005 constant, we can simplify it. */
8006 if (TREE_CODE (arg1
) == INTEGER_CST
8007 && (TREE_CODE (arg0
) == MIN_EXPR
8008 || TREE_CODE (arg0
) == MAX_EXPR
)
8009 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8011 tem
= optimize_minmax_comparison (code
, type
, op0
, op1
);
8016 /* Simplify comparison of something with itself. (For IEEE
8017 floating-point, we can only do some of these simplifications.) */
8018 if (operand_equal_p (arg0
, arg1
, 0))
8023 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8024 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8025 return constant_boolean_node (1, type
);
8030 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8031 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8032 return constant_boolean_node (1, type
);
8033 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
8036 /* For NE, we can only do this simplification if integer
8037 or we don't honor IEEE floating point NaNs. */
8038 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
8039 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8041 /* ... fall through ... */
8044 return constant_boolean_node (0, type
);
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, minval),
                           arg1);
          tree equal_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, maxval),
                           arg1);
          tree low_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, minval,
                                       cval2, maxval),
                           arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand (type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand (type, integer_one_node, arg0);
                }

              if (save_p)
                return save_expr (build2 (code, type, cval1, cval2));
              return fold_build2 (code, type, cval1, cval2);
            }
        }
    }
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
        {
          tree op0 = TREE_OPERAND (cref0, 0);
          tree op1 = TREE_OPERAND (cref1, 0);
          return fold_build2 (code, type,
                              build_fold_addr_expr (op0),
                              build_fold_addr_expr (op1));
        }
    }
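  /* Worked example (hypothetical declarations):

         struct t { int a; } *x, *y, z;

     &x->a == &y->a folds to x == y, and &x->a == &z.a folds to x == &z,
     because both COMPONENT_REFs name the same FIELD_DECL and therefore
     the same offset from their base objects.  */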
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  return NULL_TREE;
}
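/* Worked example (illustrative values): with truncating division,
   X / 3 == 2 holds exactly for X in [6, 8], so fold_div_compare can
   rewrite it as a range test rather than as X == 6.  Similarly
   X / 3 > 2 becomes X > 8.  */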
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
                     fold_build2 (MULT_EXPR, itype, rpart, rpart),
                     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
                      fold_convert (itype, integer_zero_node));
}
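/* Usage sketch (hypothetical operands, not a call that appears at this
   point in the file): for a complex integer Z, fold_binary rewrites
   Z * conj(Z) by calling fold_mult_zconjz (type, Z), which yields
   COMPLEX_EXPR <re*re + im*im, 0>; for Z = 3 + 4i that is 25 + 0i.  */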
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        tem = const_binop (code, arg0, arg1, 0);
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expr.c.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
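  /* Worked example: in (a < b) & (c < d) both operands are truth values,
     so the bitwise AND is rewritten as a TRUTH_AND_EXPR on boolean
     operands and converted back to TYPE.  Likewise (a < b) != (c < d)
     becomes a TRUTH_XOR_EXPR and (a < b) == (c < d) becomes its
     inversion.  */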
8335 if (TREE_CODE_CLASS (code
) == tcc_binary
8336 || TREE_CODE_CLASS (code
) == tcc_comparison
)
8338 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
8339 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8340 fold_build2 (code
, type
,
8341 TREE_OPERAND (arg0
, 1), op1
));
8342 if (TREE_CODE (arg1
) == COMPOUND_EXPR
8343 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
8344 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
8345 fold_build2 (code
, type
,
8346 op0
, TREE_OPERAND (arg1
, 1)));
8348 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
8350 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
8352 /*cond_first_p=*/1);
8353 if (tem
!= NULL_TREE
)
8357 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
8359 tem
= fold_binary_op_with_conditional_arg (code
, type
, op0
, op1
,
8361 /*cond_first_p=*/0);
8362 if (tem
!= NULL_TREE
)
8370 /* A + (-B) -> A - B */
8371 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
8372 return fold_build2 (MINUS_EXPR
, type
,
8373 fold_convert (type
, arg0
),
8374 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8375 /* (-A) + B -> B - A */
8376 if (TREE_CODE (arg0
) == NEGATE_EXPR
8377 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
8378 return fold_build2 (MINUS_EXPR
, type
,
8379 fold_convert (type
, arg1
),
8380 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
8381 /* Convert ~A + 1 to -A. */
8382 if (INTEGRAL_TYPE_P (type
)
8383 && TREE_CODE (arg0
) == BIT_NOT_EXPR
8384 && integer_onep (arg1
))
8385 return fold_build1 (NEGATE_EXPR
, type
, TREE_OPERAND (arg0
, 0));
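      /* Worked examples of the folds above: x + (-y) becomes x - y,
         (-x) + y becomes y - x, and for an integral type ~x + 1 becomes
         -x, which is the two's-complement identity -x == ~x + 1 read in
         the other direction.  */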
8387 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8389 if ((TREE_CODE (arg0
) == MULT_EXPR
8390 || TREE_CODE (arg1
) == MULT_EXPR
)
8391 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
8393 tree tem
= fold_plusminus_mult_expr (code
, type
, arg0
, arg1
);
8398 if (! FLOAT_TYPE_P (type
))
8400 if (integer_zerop (arg1
))
8401 return non_lvalue (fold_convert (type
, arg0
));
          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
8407 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8408 && TREE_CODE (arg1
) == BIT_AND_EXPR
8409 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8410 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8411 && integer_zerop (const_binop (BIT_AND_EXPR
,
8412 TREE_OPERAND (arg0
, 1),
8413 TREE_OPERAND (arg1
, 1), 0)))
8415 code
= BIT_IOR_EXPR
;
8419 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8420 (plus (plus (mult) (mult)) (foo)) so that we can
8421 take advantage of the factoring cases below. */
8422 if (((TREE_CODE (arg0
) == PLUS_EXPR
8423 || TREE_CODE (arg0
) == MINUS_EXPR
)
8424 && TREE_CODE (arg1
) == MULT_EXPR
)
8425 || ((TREE_CODE (arg1
) == PLUS_EXPR
8426 || TREE_CODE (arg1
) == MINUS_EXPR
)
8427 && TREE_CODE (arg0
) == MULT_EXPR
))
8429 tree parg0
, parg1
, parg
, marg
;
8430 enum tree_code pcode
;
8432 if (TREE_CODE (arg1
) == MULT_EXPR
)
8433 parg
= arg0
, marg
= arg1
;
8435 parg
= arg1
, marg
= arg0
;
8436 pcode
= TREE_CODE (parg
);
8437 parg0
= TREE_OPERAND (parg
, 0);
8438 parg1
= TREE_OPERAND (parg
, 1);
8442 if (TREE_CODE (parg0
) == MULT_EXPR
8443 && TREE_CODE (parg1
) != MULT_EXPR
)
8444 return fold_build2 (pcode
, type
,
8445 fold_build2 (PLUS_EXPR
, type
,
8446 fold_convert (type
, parg0
),
8447 fold_convert (type
, marg
)),
8448 fold_convert (type
, parg1
));
8449 if (TREE_CODE (parg0
) != MULT_EXPR
8450 && TREE_CODE (parg1
) == MULT_EXPR
)
8451 return fold_build2 (PLUS_EXPR
, type
,
8452 fold_convert (type
, parg0
),
8453 fold_build2 (pcode
, type
,
8454 fold_convert (type
, marg
),
      /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
         of the array.  The loop optimizer sometimes produces this kind
         of expression.  */
8462 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8464 tem
= try_move_mult_to_index (PLUS_EXPR
, arg0
, arg1
);
8466 return fold_convert (type
, tem
);
8468 else if (TREE_CODE (arg1
) == ADDR_EXPR
)
8470 tem
= try_move_mult_to_index (PLUS_EXPR
, arg1
, arg0
);
8472 return fold_convert (type
, tem
);
8477 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8478 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
8479 return non_lvalue (fold_convert (type
, arg0
));
8481 /* Likewise if the operands are reversed. */
8482 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
8483 return non_lvalue (fold_convert (type
, arg1
));
8485 /* Convert X + -C into X - C. */
8486 if (TREE_CODE (arg1
) == REAL_CST
8487 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
8489 tem
= fold_negate_const (arg1
, type
);
8490 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
8491 return fold_build2 (MINUS_EXPR
, type
,
8492 fold_convert (type
, arg0
),
8493 fold_convert (type
, tem
));
8496 if (flag_unsafe_math_optimizations
8497 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
8498 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
8499 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
8502 /* Convert x+x into x*2.0. */
8503 if (operand_equal_p (arg0
, arg1
, 0)
8504 && SCALAR_FLOAT_TYPE_P (type
))
8505 return fold_build2 (MULT_EXPR
, type
, arg0
,
8506 build_real (type
, dconst2
));
8508 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8509 if (flag_unsafe_math_optimizations
8510 && TREE_CODE (arg1
) == PLUS_EXPR
8511 && TREE_CODE (arg0
) != MULT_EXPR
)
8513 tree tree10
= TREE_OPERAND (arg1
, 0);
8514 tree tree11
= TREE_OPERAND (arg1
, 1);
8515 if (TREE_CODE (tree11
) == MULT_EXPR
8516 && TREE_CODE (tree10
) == MULT_EXPR
)
8519 tree0
= fold_build2 (PLUS_EXPR
, type
, arg0
, tree10
);
8520 return fold_build2 (PLUS_EXPR
, type
, tree0
, tree11
);
8523 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8524 if (flag_unsafe_math_optimizations
8525 && TREE_CODE (arg0
) == PLUS_EXPR
8526 && TREE_CODE (arg1
) != MULT_EXPR
)
8528 tree tree00
= TREE_OPERAND (arg0
, 0);
8529 tree tree01
= TREE_OPERAND (arg0
, 1);
8530 if (TREE_CODE (tree01
) == MULT_EXPR
8531 && TREE_CODE (tree00
) == MULT_EXPR
)
8534 tree0
= fold_build2 (PLUS_EXPR
, type
, tree01
, arg1
);
8535 return fold_build2 (PLUS_EXPR
, type
, tree00
, tree0
);
8541 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8542 is a rotate of A by C1 bits. */
8543 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8544 is a rotate of A by B bits. */
8546 enum tree_code code0
, code1
;
8547 code0
= TREE_CODE (arg0
);
8548 code1
= TREE_CODE (arg1
);
8549 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
8550 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
8551 && operand_equal_p (TREE_OPERAND (arg0
, 0),
8552 TREE_OPERAND (arg1
, 0), 0)
8553 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
8555 tree tree01
, tree11
;
8556 enum tree_code code01
, code11
;
8558 tree01
= TREE_OPERAND (arg0
, 1);
8559 tree11
= TREE_OPERAND (arg1
, 1);
8560 STRIP_NOPS (tree01
);
8561 STRIP_NOPS (tree11
);
8562 code01
= TREE_CODE (tree01
);
8563 code11
= TREE_CODE (tree11
);
8564 if (code01
== INTEGER_CST
8565 && code11
== INTEGER_CST
8566 && TREE_INT_CST_HIGH (tree01
) == 0
8567 && TREE_INT_CST_HIGH (tree11
) == 0
8568 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
8569 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
8570 return build2 (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8571 code0
== LSHIFT_EXPR
? tree01
: tree11
);
8572 else if (code11
== MINUS_EXPR
)
8574 tree tree110
, tree111
;
8575 tree110
= TREE_OPERAND (tree11
, 0);
8576 tree111
= TREE_OPERAND (tree11
, 1);
8577 STRIP_NOPS (tree110
);
8578 STRIP_NOPS (tree111
);
8579 if (TREE_CODE (tree110
) == INTEGER_CST
8580 && 0 == compare_tree_int (tree110
,
8582 (TREE_TYPE (TREE_OPERAND
8584 && operand_equal_p (tree01
, tree111
, 0))
8585 return build2 ((code0
== LSHIFT_EXPR
8588 type
, TREE_OPERAND (arg0
, 0), tree01
);
8590 else if (code01
== MINUS_EXPR
)
8592 tree tree010
, tree011
;
8593 tree010
= TREE_OPERAND (tree01
, 0);
8594 tree011
= TREE_OPERAND (tree01
, 1);
8595 STRIP_NOPS (tree010
);
8596 STRIP_NOPS (tree011
);
8597 if (TREE_CODE (tree010
) == INTEGER_CST
8598 && 0 == compare_tree_int (tree010
,
8600 (TREE_TYPE (TREE_OPERAND
8602 && operand_equal_p (tree11
, tree011
, 0))
8603 return build2 ((code0
!= LSHIFT_EXPR
8606 type
, TREE_OPERAND (arg0
, 0), tree11
);
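      /* Worked example (assuming a 32-bit unsigned A): in

             (A << 3) + (A >> 29)

         the two pieces occupy disjoint bit positions, so the addition is
         the same as a bitwise OR and the whole sum is exactly a left
         rotate of A by 3, i.e. LROTATE_EXPR <A, 3>.  The MINUS_EXPR
         forms above recognize the variable-count version
         (A << B) + (A >> (32 - B)).  */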
8612 /* In most languages, can't associate operations on floats through
8613 parentheses. Rather than remember where the parentheses were, we
8614 don't associate floats at all, unless the user has specified
8615 -funsafe-math-optimizations. */
8617 if (! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
8619 tree var0
, con0
, lit0
, minus_lit0
;
8620 tree var1
, con1
, lit1
, minus_lit1
;
8622 /* Split both trees into variables, constants, and literals. Then
8623 associate each group together, the constants with literals,
8624 then the result with variables. This increases the chances of
8625 literals being recombined later and of generating relocatable
8626 expressions for the sum of a constant and literal. */
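      /* Worked example (hypothetical operands): when folding
         (x + 3) + (y + 5), split_tree yields var0 = x, lit0 = 3,
         var1 = y and lit1 = 5.  The literals are associated first, so
         the expression is rebuilt as (x + y) + 8, keeping the combined
         constant visible for later folding.  */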
8627 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
8628 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
8629 code
== MINUS_EXPR
);
8631 /* Only do something if we found more than two objects. Otherwise,
8632 nothing has changed and we risk infinite recursion. */
8633 if (2 < ((var0
!= 0) + (var1
!= 0)
8634 + (con0
!= 0) + (con1
!= 0)
8635 + (lit0
!= 0) + (lit1
!= 0)
8636 + (minus_lit0
!= 0) + (minus_lit1
!= 0)))
8638 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8639 if (code
== MINUS_EXPR
)
8642 var0
= associate_trees (var0
, var1
, code
, type
);
8643 con0
= associate_trees (con0
, con1
, code
, type
);
8644 lit0
= associate_trees (lit0
, lit1
, code
, type
);
8645 minus_lit0
= associate_trees (minus_lit0
, minus_lit1
, code
, type
);
          /* Preserve the MINUS_EXPR if the negative part of the literal is
             greater than the positive part.  Otherwise, the multiplicative
             folding code (i.e. extract_muldiv) may be fooled in case
             unsigned constants are subtracted, as in the following
             example: ((X*2 + 4) - 8U)/2.  */
8652 if (minus_lit0
&& lit0
)
8654 if (TREE_CODE (lit0
) == INTEGER_CST
8655 && TREE_CODE (minus_lit0
) == INTEGER_CST
8656 && tree_int_cst_lt (lit0
, minus_lit0
))
8658 minus_lit0
= associate_trees (minus_lit0
, lit0
,
8664 lit0
= associate_trees (lit0
, minus_lit0
,
8672 return fold_convert (type
,
8673 associate_trees (var0
, minus_lit0
,
8677 con0
= associate_trees (con0
, minus_lit0
,
8679 return fold_convert (type
,
8680 associate_trees (var0
, con0
,
8685 con0
= associate_trees (con0
, lit0
, code
, type
);
8686 return fold_convert (type
, associate_trees (var0
, con0
,
8694 /* A - (-B) -> A + B */
8695 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
8696 return fold_build2 (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0));
8697 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8698 if (TREE_CODE (arg0
) == NEGATE_EXPR
8699 && (FLOAT_TYPE_P (type
)
8700 || (INTEGRAL_TYPE_P (type
) && flag_wrapv
&& !flag_trapv
))
8701 && negate_expr_p (arg1
)
8702 && reorder_operands_p (arg0
, arg1
))
8703 return fold_build2 (MINUS_EXPR
, type
, negate_expr (arg1
),
8704 TREE_OPERAND (arg0
, 0));
8705 /* Convert -A - 1 to ~A. */
8706 if (INTEGRAL_TYPE_P (type
)
8707 && TREE_CODE (arg0
) == NEGATE_EXPR
8708 && integer_onep (arg1
))
8709 return fold_build1 (BIT_NOT_EXPR
, type
,
8710 fold_convert (type
, TREE_OPERAND (arg0
, 0)));
8712 /* Convert -1 - A to ~A. */
8713 if (INTEGRAL_TYPE_P (type
)
8714 && integer_all_onesp (arg0
))
8715 return fold_build1 (BIT_NOT_EXPR
, type
, arg1
);
8717 if (! FLOAT_TYPE_P (type
))
8719 if (integer_zerop (arg0
))
8720 return negate_expr (fold_convert (type
, arg1
));
8721 if (integer_zerop (arg1
))
8722 return non_lvalue (fold_convert (type
, arg0
));
8724 /* Fold A - (A & B) into ~B & A. */
8725 if (!TREE_SIDE_EFFECTS (arg0
)
8726 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
8728 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
8729 return fold_build2 (BIT_AND_EXPR
, type
,
8730 fold_build1 (BIT_NOT_EXPR
, type
,
8731 TREE_OPERAND (arg1
, 0)),
8733 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8734 return fold_build2 (BIT_AND_EXPR
, type
,
8735 fold_build1 (BIT_NOT_EXPR
, type
,
8736 TREE_OPERAND (arg1
, 1)),
8740 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8741 any power of 2 minus 1. */
8742 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8743 && TREE_CODE (arg1
) == BIT_AND_EXPR
8744 && operand_equal_p (TREE_OPERAND (arg0
, 0),
8745 TREE_OPERAND (arg1
, 0), 0))
8747 tree mask0
= TREE_OPERAND (arg0
, 1);
8748 tree mask1
= TREE_OPERAND (arg1
, 1);
8749 tree tem
= fold_build1 (BIT_NOT_EXPR
, type
, mask0
);
8751 if (operand_equal_p (tem
, mask1
, 0))
8753 tem
= fold_build2 (BIT_XOR_EXPR
, type
,
8754 TREE_OPERAND (arg0
, 0), mask1
);
8755 return fold_build2 (MINUS_EXPR
, type
, tem
, mask1
);
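          /* Worked example: with B = 7 and A = 0x2b,

                 (A & ~7) - (A & 7) = 0x28 - 0x03 = 0x25
                 (A ^ 7) - 7        = 0x2c - 0x07 = 0x25

             so the subtraction of complementary masked values can be
             rewritten as (A ^ B) - B.  */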
8760 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8761 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
8762 return non_lvalue (fold_convert (type
, arg0
));
8764 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8765 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8766 (-ARG1 + ARG0) reduces to -ARG1. */
8767 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
8768 return negate_expr (fold_convert (type
, arg1
));
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */
8776 if ((! FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
)
8777 && operand_equal_p (arg0
, arg1
, 0))
8778 return fold_convert (type
, integer_zero_node
);
8780 /* A - B -> A + (-B) if B is easily negatable. */
8781 if (negate_expr_p (arg1
)
8782 && ((FLOAT_TYPE_P (type
)
8783 /* Avoid this transformation if B is a positive REAL_CST. */
8784 && (TREE_CODE (arg1
) != REAL_CST
8785 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
8786 || (INTEGRAL_TYPE_P (type
) && flag_wrapv
&& !flag_trapv
)))
8787 return fold_build2 (PLUS_EXPR
, type
,
8788 fold_convert (type
, arg0
),
8789 fold_convert (type
, negate_expr (arg1
)));
8791 /* Try folding difference of addresses. */
8795 if ((TREE_CODE (arg0
) == ADDR_EXPR
8796 || TREE_CODE (arg1
) == ADDR_EXPR
)
8797 && ptr_difference_const (arg0
, arg1
, &diff
))
8798 return build_int_cst_type (type
, diff
);
8801 /* Fold &a[i] - &a[j] to i-j. */
8802 if (TREE_CODE (arg0
) == ADDR_EXPR
8803 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
8804 && TREE_CODE (arg1
) == ADDR_EXPR
8805 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
8807 tree aref0
= TREE_OPERAND (arg0
, 0);
8808 tree aref1
= TREE_OPERAND (arg1
, 0);
8809 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
8810 TREE_OPERAND (aref1
, 0), 0))
8812 tree op0
= fold_convert (type
, TREE_OPERAND (aref0
, 1));
8813 tree op1
= fold_convert (type
, TREE_OPERAND (aref1
, 1));
8814 tree esz
= array_ref_element_size (aref0
);
8815 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
8816 return fold_build2 (MULT_EXPR
, type
, diff
,
8817 fold_convert (type
, esz
));
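          /* Worked example (hypothetical array): for int a[10], the
             difference &a[7] - &a[2] at this level is a difference of
             byte addresses, so it folds to (7 - 2) * sizeof (int), e.g.
             20 with 4-byte int; the enclosing exact division by the
             element size that the front end emits for pointer
             subtraction then reduces it to 5.  */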
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
         of the array.  The loop optimizer sometimes produces this kind
         of expression.  */
8825 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8827 tem
= try_move_mult_to_index (MINUS_EXPR
, arg0
, arg1
);
8829 return fold_convert (type
, tem
);
8832 if (flag_unsafe_math_optimizations
8833 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
8834 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
8835 && (tem
= distribute_real_division (code
, type
, arg0
, arg1
)))
8838 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8840 if ((TREE_CODE (arg0
) == MULT_EXPR
8841 || TREE_CODE (arg1
) == MULT_EXPR
)
8842 && (!FLOAT_TYPE_P (type
) || flag_unsafe_math_optimizations
))
8844 tree tem
= fold_plusminus_mult_expr (code
, type
, arg0
, arg1
);
8852 /* (-A) * (-B) -> A * B */
8853 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
8854 return fold_build2 (MULT_EXPR
, type
,
8855 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
8856 fold_convert (type
, negate_expr (arg1
)));
8857 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
8858 return fold_build2 (MULT_EXPR
, type
,
8859 fold_convert (type
, negate_expr (arg0
)),
8860 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
8862 if (! FLOAT_TYPE_P (type
))
8864 if (integer_zerop (arg1
))
8865 return omit_one_operand (type
, arg1
, arg0
);
8866 if (integer_onep (arg1
))
8867 return non_lvalue (fold_convert (type
, arg0
));
8868 /* Transform x * -1 into -x. */
8869 if (integer_all_onesp (arg1
))
8870 return fold_convert (type
, negate_expr (arg0
));
8872 /* (a * (1 << b)) is (a << b) */
8873 if (TREE_CODE (arg1
) == LSHIFT_EXPR
8874 && integer_onep (TREE_OPERAND (arg1
, 0)))
8875 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
8876 TREE_OPERAND (arg1
, 1));
8877 if (TREE_CODE (arg0
) == LSHIFT_EXPR
8878 && integer_onep (TREE_OPERAND (arg0
, 0)))
8879 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
8880 TREE_OPERAND (arg0
, 1));
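          /* Worked example: 1 << b has a single bit set, so multiplying
             by it is a shift: x * (1 << b) becomes x << b, and the
             second form handles the commuted (1 << b) * x.  */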
8882 if (TREE_CODE (arg1
) == INTEGER_CST
8883 && 0 != (tem
= extract_muldiv (op0
,
8884 fold_convert (type
, arg1
),
8886 return fold_convert (type
, tem
);
8888 /* Optimize z * conj(z) for integer complex numbers. */
8889 if (TREE_CODE (arg0
) == CONJ_EXPR
8890 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8891 return fold_mult_zconjz (type
, arg1
);
8892 if (TREE_CODE (arg1
) == CONJ_EXPR
8893 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8894 return fold_mult_zconjz (type
, arg0
);
8898 /* Maybe fold x * 0 to 0. The expressions aren't the same
8899 when x is NaN, since x * 0 is also NaN. Nor are they the
8900 same in modes with signed zeros, since multiplying a
8901 negative value by 0 gives -0, not +0. */
8902 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
8903 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
8904 && real_zerop (arg1
))
8905 return omit_one_operand (type
, arg1
, arg0
);
8906 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8907 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8908 && real_onep (arg1
))
8909 return non_lvalue (fold_convert (type
, arg0
));
8911 /* Transform x * -1.0 into -x. */
8912 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8913 && real_minus_onep (arg1
))
8914 return fold_convert (type
, negate_expr (arg0
));
8916 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8917 if (flag_unsafe_math_optimizations
8918 && TREE_CODE (arg0
) == RDIV_EXPR
8919 && TREE_CODE (arg1
) == REAL_CST
8920 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
8922 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
8925 return fold_build2 (RDIV_EXPR
, type
, tem
,
8926 TREE_OPERAND (arg0
, 1));
8929 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8930 if (operand_equal_p (arg0
, arg1
, 0))
8932 tree tem
= fold_strip_sign_ops (arg0
);
8933 if (tem
!= NULL_TREE
)
8935 tem
= fold_convert (type
, tem
);
8936 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
8940 /* Optimize z * conj(z) for floating point complex numbers.
8941 Guarded by flag_unsafe_math_optimizations as non-finite
8942 imaginary components don't produce scalar results. */
8943 if (flag_unsafe_math_optimizations
8944 && TREE_CODE (arg0
) == CONJ_EXPR
8945 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8946 return fold_mult_zconjz (type
, arg1
);
8947 if (flag_unsafe_math_optimizations
8948 && TREE_CODE (arg1
) == CONJ_EXPR
8949 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8950 return fold_mult_zconjz (type
, arg0
);
8952 if (flag_unsafe_math_optimizations
)
8954 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
8955 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
8957 /* Optimizations of root(...)*root(...). */
8958 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
8960 tree rootfn
, arg
, arglist
;
8961 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
8962 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
8964 /* Optimize sqrt(x)*sqrt(x) as x. */
8965 if (BUILTIN_SQRT_P (fcode0
)
8966 && operand_equal_p (arg00
, arg10
, 0)
8967 && ! HONOR_SNANS (TYPE_MODE (type
)))
8970 /* Optimize root(x)*root(y) as root(x*y). */
8971 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
8972 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
8973 arglist
= build_tree_list (NULL_TREE
, arg
);
8974 return build_function_call_expr (rootfn
, arglist
);
8977 /* Optimize expN(x)*expN(y) as expN(x+y). */
8978 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
8980 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
8981 tree arg
= fold_build2 (PLUS_EXPR
, type
,
8982 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
8983 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
8984 tree arglist
= build_tree_list (NULL_TREE
, arg
);
8985 return build_function_call_expr (expfn
, arglist
);
8988 /* Optimizations of pow(...)*pow(...). */
8989 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
8990 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
8991 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
8993 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
8994 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
8996 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
8997 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
9000 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9001 if (operand_equal_p (arg01
, arg11
, 0))
9003 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9004 tree arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
9005 tree arglist
= tree_cons (NULL_TREE
, arg
,
9006 build_tree_list (NULL_TREE
,
9008 return build_function_call_expr (powfn
, arglist
);
9011 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9012 if (operand_equal_p (arg00
, arg10
, 0))
9014 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9015 tree arg
= fold_build2 (PLUS_EXPR
, type
, arg01
, arg11
);
9016 tree arglist
= tree_cons (NULL_TREE
, arg00
,
9017 build_tree_list (NULL_TREE
,
9019 return build_function_call_expr (powfn
, arglist
);
9023 /* Optimize tan(x)*cos(x) as sin(x). */
9024 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
9025 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
9026 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
9027 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
9028 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
9029 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
9030 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9031 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
9033 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
9035 if (sinfn
!= NULL_TREE
)
9036 return build_function_call_expr (sinfn
,
9037 TREE_OPERAND (arg0
, 1));
9040 /* Optimize x*pow(x,c) as pow(x,c+1). */
9041 if (fcode1
== BUILT_IN_POW
9042 || fcode1
== BUILT_IN_POWF
9043 || fcode1
== BUILT_IN_POWL
)
9045 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9046 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
,
9048 if (TREE_CODE (arg11
) == REAL_CST
9049 && ! TREE_CONSTANT_OVERFLOW (arg11
)
9050 && operand_equal_p (arg0
, arg10
, 0))
9052 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
9056 c
= TREE_REAL_CST (arg11
);
9057 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
9058 arg
= build_real (type
, c
);
9059 arglist
= build_tree_list (NULL_TREE
, arg
);
9060 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
9061 return build_function_call_expr (powfn
, arglist
);
9065 /* Optimize pow(x,c)*x as pow(x,c+1). */
9066 if (fcode0
== BUILT_IN_POW
9067 || fcode0
== BUILT_IN_POWF
9068 || fcode0
== BUILT_IN_POWL
)
9070 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9071 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
,
9073 if (TREE_CODE (arg01
) == REAL_CST
9074 && ! TREE_CONSTANT_OVERFLOW (arg01
)
9075 && operand_equal_p (arg1
, arg00
, 0))
9077 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9081 c
= TREE_REAL_CST (arg01
);
9082 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
9083 arg
= build_real (type
, c
);
9084 arglist
= build_tree_list (NULL_TREE
, arg
);
9085 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
9086 return build_function_call_expr (powfn
, arglist
);
9090 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9092 && operand_equal_p (arg0
, arg1
, 0))
9094 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
9098 tree arg
= build_real (type
, dconst2
);
9099 tree arglist
= build_tree_list (NULL_TREE
, arg
);
9100 arglist
= tree_cons (NULL_TREE
, arg0
, arglist
);
9101 return build_function_call_expr (powfn
, arglist
);
9110 if (integer_all_onesp (arg1
))
9111 return omit_one_operand (type
, arg1
, arg0
);
9112 if (integer_zerop (arg1
))
9113 return non_lvalue (fold_convert (type
, arg0
));
9114 if (operand_equal_p (arg0
, arg1
, 0))
9115 return non_lvalue (fold_convert (type
, arg0
));
9118 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9119 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9121 t1
= build_int_cst (type
, -1);
9122 t1
= force_fit_type (t1
, 0, false, false);
9123 return omit_one_operand (type
, t1
, arg1
);
9127 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9128 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9130 t1
= build_int_cst (type
, -1);
9131 t1
= force_fit_type (t1
, 0, false, false);
9132 return omit_one_operand (type
, t1
, arg0
);
9135 /* Canonicalize (X & C1) | C2. */
9136 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9137 && TREE_CODE (arg1
) == INTEGER_CST
9138 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9140 unsigned HOST_WIDE_INT hi1
, lo1
, hi2
, lo2
, mlo
, mhi
;
9141 int width
= TYPE_PRECISION (type
);
9142 hi1
= TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1));
9143 lo1
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
9144 hi2
= TREE_INT_CST_HIGH (arg1
);
9145 lo2
= TREE_INT_CST_LOW (arg1
);
9147 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9148 if ((hi1
& hi2
) == hi1
&& (lo1
& lo2
) == lo1
)
9149 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9151 if (width
> HOST_BITS_PER_WIDE_INT
)
9153 mhi
= (unsigned HOST_WIDE_INT
) -1
9154 >> (2 * HOST_BITS_PER_WIDE_INT
- width
);
9160 mlo
= (unsigned HOST_WIDE_INT
) -1
9161 >> (HOST_BITS_PER_WIDE_INT
- width
);
9164 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9165 if ((~(hi1
| hi2
) & mhi
) == 0 && (~(lo1
| lo2
) & mlo
) == 0)
9166 return fold_build2 (BIT_IOR_EXPR
, type
,
9167 TREE_OPERAND (arg0
, 0), arg1
);
9169 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9172 if ((hi1
& ~hi2
) != hi1
|| (lo1
& ~lo2
) != lo1
)
9173 return fold_build2 (BIT_IOR_EXPR
, type
,
9174 fold_build2 (BIT_AND_EXPR
, type
,
9175 TREE_OPERAND (arg0
, 0),
9176 build_int_cst_wide (type
,
9182 /* (X & Y) | Y is (X, Y). */
9183 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9184 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9185 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9186 /* (X & Y) | X is (Y, X). */
9187 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9188 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9189 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9190 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 1));
9191 /* X | (X & Y) is (Y, X). */
9192 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9193 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
9194 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
9195 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 1));
9196 /* X | (Y & X) is (Y, X). */
9197 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9198 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9199 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9200 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 0));
9202 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
9203 if (t1
!= NULL_TREE
)
9206 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9208 This results in more efficient code for machines without a NAND
9209 instruction. Combine will canonicalize to the first form
9210 which will allow use of NAND instructions provided by the
9211 backend if they exist. */
9212 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9213 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9215 return fold_build1 (BIT_NOT_EXPR
, type
,
9216 build2 (BIT_AND_EXPR
, type
,
9217 TREE_OPERAND (arg0
, 0),
9218 TREE_OPERAND (arg1
, 0)));
9221 /* See if this can be simplified into a rotate first. If that
9222 is unsuccessful continue in the association code. */
9226 if (integer_zerop (arg1
))
9227 return non_lvalue (fold_convert (type
, arg0
));
9228 if (integer_all_onesp (arg1
))
9229 return fold_build1 (BIT_NOT_EXPR
, type
, arg0
);
9230 if (operand_equal_p (arg0
, arg1
, 0))
9231 return omit_one_operand (type
, integer_zero_node
, arg0
);
9234 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9235 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9237 t1
= build_int_cst (type
, -1);
9238 t1
= force_fit_type (t1
, 0, false, false);
9239 return omit_one_operand (type
, t1
, arg1
);
9243 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9244 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9246 t1
= build_int_cst (type
, -1);
9247 t1
= force_fit_type (t1
, 0, false, false);
9248 return omit_one_operand (type
, t1
, arg0
);
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
9255 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9256 && TREE_CODE (arg1
) == BIT_AND_EXPR
9257 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9258 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9259 && integer_zerop (const_binop (BIT_AND_EXPR
,
9260 TREE_OPERAND (arg0
, 1),
9261 TREE_OPERAND (arg1
, 1), 0)))
9263 code
= BIT_IOR_EXPR
;
9267 /* (X | Y) ^ X -> Y & ~ X*/
9268 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9269 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9271 tree t2
= TREE_OPERAND (arg0
, 1);
9272 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
9274 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9275 fold_convert (type
, t1
));
9279 /* (Y | X) ^ X -> Y & ~ X*/
9280 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9281 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9283 tree t2
= TREE_OPERAND (arg0
, 0);
9284 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
),
9286 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9287 fold_convert (type
, t1
));
9291 /* X ^ (X | Y) -> Y & ~ X*/
9292 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9293 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
9295 tree t2
= TREE_OPERAND (arg1
, 1);
9296 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
9298 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9299 fold_convert (type
, t1
));
9303 /* X ^ (Y | X) -> Y & ~ X*/
9304 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9305 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
9307 tree t2
= TREE_OPERAND (arg1
, 0);
9308 t1
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (arg0
),
9310 t1
= fold_build2 (BIT_AND_EXPR
, type
, fold_convert (type
, t2
),
9311 fold_convert (type
, t1
));
9315 /* Convert ~X ^ ~Y to X ^ Y. */
9316 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9317 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9318 return fold_build2 (code
, type
,
9319 fold_convert (type
, TREE_OPERAND (arg0
, 0)),
9320 fold_convert (type
, TREE_OPERAND (arg1
, 0)));
9322 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9323 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9324 && integer_onep (TREE_OPERAND (arg0
, 1))
9325 && integer_onep (arg1
))
9326 return fold_build2 (EQ_EXPR
, type
, arg0
,
9327 build_int_cst (TREE_TYPE (arg0
), 0));
9329 /* Fold (X & Y) ^ Y as ~X & Y. */
9330 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9331 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9333 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
9334 return fold_build2 (BIT_AND_EXPR
, type
,
9335 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9336 fold_convert (type
, arg1
));
9338 /* Fold (X & Y) ^ X as ~Y & X. */
9339 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9340 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9341 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9343 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 1));
9344 return fold_build2 (BIT_AND_EXPR
, type
,
9345 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9346 fold_convert (type
, arg1
));
9348 /* Fold X ^ (X & Y) as X & ~Y. */
9349 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9350 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9352 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 1));
9353 return fold_build2 (BIT_AND_EXPR
, type
,
9354 fold_convert (type
, arg0
),
9355 fold_build1 (BIT_NOT_EXPR
, type
, tem
));
9357 /* Fold X ^ (Y & X) as ~Y & X. */
9358 if (TREE_CODE (arg1
) == BIT_AND_EXPR
9359 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9360 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9362 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 0));
9363 return fold_build2 (BIT_AND_EXPR
, type
,
9364 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9365 fold_convert (type
, arg0
));
9368 /* See if this can be simplified into a rotate first. If that
9369 is unsuccessful continue in the association code. */
9373 if (integer_all_onesp (arg1
))
9374 return non_lvalue (fold_convert (type
, arg0
));
9375 if (integer_zerop (arg1
))
9376 return omit_one_operand (type
, arg1
, arg0
);
9377 if (operand_equal_p (arg0
, arg1
, 0))
9378 return non_lvalue (fold_convert (type
, arg0
));
9380 /* ~X & X is always zero. */
9381 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9382 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9383 return omit_one_operand (type
, integer_zero_node
, arg1
);
9385 /* X & ~X is always zero. */
9386 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9387 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9388 return omit_one_operand (type
, integer_zero_node
, arg0
);
9390 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9391 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9392 && TREE_CODE (arg1
) == INTEGER_CST
9393 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9394 return fold_build2 (BIT_IOR_EXPR
, type
,
9395 fold_build2 (BIT_AND_EXPR
, type
,
9396 TREE_OPERAND (arg0
, 0), arg1
),
9397 fold_build2 (BIT_AND_EXPR
, type
,
9398 TREE_OPERAND (arg0
, 1), arg1
));
9400 /* (X | Y) & Y is (X, Y). */
9401 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9402 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9403 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 0));
9404 /* (X | Y) & X is (Y, X). */
9405 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
9406 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9407 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9408 return omit_one_operand (type
, arg1
, TREE_OPERAND (arg0
, 1));
9409 /* X & (X | Y) is (Y, X). */
9410 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9411 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
9412 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
9413 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 1));
9414 /* X & (Y | X) is (Y, X). */
9415 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
9416 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9417 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9418 return omit_one_operand (type
, arg0
, TREE_OPERAND (arg1
, 0));
9420 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9421 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9422 && integer_onep (TREE_OPERAND (arg0
, 1))
9423 && integer_onep (arg1
))
9425 tem
= TREE_OPERAND (arg0
, 0);
9426 return fold_build2 (EQ_EXPR
, type
,
9427 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
9428 build_int_cst (TREE_TYPE (tem
), 1)),
9429 build_int_cst (TREE_TYPE (tem
), 0));
9431 /* Fold ~X & 1 as (X & 1) == 0. */
9432 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9433 && integer_onep (arg1
))
9435 tem
= TREE_OPERAND (arg0
, 0);
9436 return fold_build2 (EQ_EXPR
, type
,
9437 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
9438 build_int_cst (TREE_TYPE (tem
), 1)),
9439 build_int_cst (TREE_TYPE (tem
), 0));
9442 /* Fold (X ^ Y) & Y as ~X & Y. */
9443 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9444 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
9446 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 0));
9447 return fold_build2 (BIT_AND_EXPR
, type
,
9448 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9449 fold_convert (type
, arg1
));
9451 /* Fold (X ^ Y) & X as ~Y & X. */
9452 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9453 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
9454 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
9456 tem
= fold_convert (type
, TREE_OPERAND (arg0
, 1));
9457 return fold_build2 (BIT_AND_EXPR
, type
,
9458 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9459 fold_convert (type
, arg1
));
9461 /* Fold X & (X ^ Y) as X & ~Y. */
9462 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
9463 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9465 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 1));
9466 return fold_build2 (BIT_AND_EXPR
, type
,
9467 fold_convert (type
, arg0
),
9468 fold_build1 (BIT_NOT_EXPR
, type
, tem
));
9470 /* Fold X & (Y ^ X) as ~Y & X. */
9471 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
9472 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
9473 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9475 tem
= fold_convert (type
, TREE_OPERAND (arg1
, 0));
9476 return fold_build2 (BIT_AND_EXPR
, type
,
9477 fold_build1 (BIT_NOT_EXPR
, type
, tem
),
9478 fold_convert (type
, arg0
));
9481 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
9482 if (t1
!= NULL_TREE
)
9484 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9485 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
9486 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
9489 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
9491 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
9492 && (~TREE_INT_CST_LOW (arg1
)
9493 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
9494 return fold_convert (type
, TREE_OPERAND (arg0
, 0));
9497 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9499 This results in more efficient code for machines without a NOR
9500 instruction. Combine will canonicalize to the first form
9501 which will allow use of NOR instructions provided by the
9502 backend if they exist. */
9503 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9504 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9506 return fold_build1 (BIT_NOT_EXPR
, type
,
9507 build2 (BIT_IOR_EXPR
, type
,
9508 TREE_OPERAND (arg0
, 0),
9509 TREE_OPERAND (arg1
, 0)));
9515 /* Don't touch a floating-point divide by zero unless the mode
9516 of the constant can represent infinity. */
9517 if (TREE_CODE (arg1
) == REAL_CST
9518 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
9519 && real_zerop (arg1
))
9522 /* Optimize A / A to 1.0 if we don't care about
9523 NaNs or Infinities. Skip the transformation
9524 for non-real operands. */
9525 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9526 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
9527 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
9528 && operand_equal_p (arg0
, arg1
, 0))
9530 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
9532 return omit_two_operands (type
, r
, arg0
, arg1
);
9535 /* The complex version of the above A / A optimization. */
9536 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9537 && operand_equal_p (arg0
, arg1
, 0))
9539 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
9540 if (! HONOR_NANS (TYPE_MODE (elem_type
))
9541 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
9543 tree r
= build_real (elem_type
, dconst1
);
9544 /* omit_two_operands will call fold_convert for us. */
9545 return omit_two_operands (type
, r
, arg0
, arg1
);
9549 /* (-A) / (-B) -> A / B */
9550 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
9551 return fold_build2 (RDIV_EXPR
, type
,
9552 TREE_OPERAND (arg0
, 0),
9553 negate_expr (arg1
));
9554 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
9555 return fold_build2 (RDIV_EXPR
, type
,
9557 TREE_OPERAND (arg1
, 0));
9559 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9560 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9561 && real_onep (arg1
))
9562 return non_lvalue (fold_convert (type
, arg0
));
9564 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9565 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9566 && real_minus_onep (arg1
))
9567 return non_lvalue (fold_convert (type
, negate_expr (arg0
)));
9569 /* If ARG1 is a constant, we can convert this to a multiply by the
9570 reciprocal. This does not have the same rounding properties,
9571 so only do this if -funsafe-math-optimizations. We can actually
9572 always safely do it if ARG1 is a power of two, but it's hard to
9573 tell if it is or not in a portable manner. */
9574 if (TREE_CODE (arg1
) == REAL_CST
)
9576 if (flag_unsafe_math_optimizations
9577 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
9579 return fold_build2 (MULT_EXPR
, type
, arg0
, tem
);
9580 /* Find the reciprocal if optimizing and the result is exact. */
9584 r
= TREE_REAL_CST (arg1
);
9585 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
9587 tem
= build_real (type
, r
);
9588 return fold_build2 (MULT_EXPR
, type
,
9589 fold_convert (type
, arg0
), tem
);
9593 /* Convert A/B/C to A/(B*C). */
9594 if (flag_unsafe_math_optimizations
9595 && TREE_CODE (arg0
) == RDIV_EXPR
)
9596 return fold_build2 (RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9597 fold_build2 (MULT_EXPR
, type
,
9598 TREE_OPERAND (arg0
, 1), arg1
));
9600 /* Convert A/(B/C) to (A/B)*C. */
9601 if (flag_unsafe_math_optimizations
9602 && TREE_CODE (arg1
) == RDIV_EXPR
)
9603 return fold_build2 (MULT_EXPR
, type
,
9604 fold_build2 (RDIV_EXPR
, type
, arg0
,
9605 TREE_OPERAND (arg1
, 0)),
9606 TREE_OPERAND (arg1
, 1));
9608 /* Convert C1/(X*C2) into (C1/C2)/X. */
9609 if (flag_unsafe_math_optimizations
9610 && TREE_CODE (arg1
) == MULT_EXPR
9611 && TREE_CODE (arg0
) == REAL_CST
9612 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
9614 tree tem
= const_binop (RDIV_EXPR
, arg0
,
9615 TREE_OPERAND (arg1
, 1), 0);
9617 return fold_build2 (RDIV_EXPR
, type
, tem
,
9618 TREE_OPERAND (arg1
, 0));
9621 if (flag_unsafe_math_optimizations
)
9623 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
9624 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
9626 /* Optimize sin(x)/cos(x) as tan(x). */
9627 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
9628 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
9629 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
9630 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9631 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
9633 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
9635 if (tanfn
!= NULL_TREE
)
9636 return build_function_call_expr (tanfn
,
9637 TREE_OPERAND (arg0
, 1));
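          /* Worked example: under -funsafe-math-optimizations,
             sin (x) / cos (x) is rewritten as tan (x) by looking up the
             tan builtin that matches TYPE (tanf, tan or tanl) and
             reusing the original argument list.  The fold is "unsafe"
             because the rewritten expression can round differently from
             the explicit division.  */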
9640 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9641 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
9642 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
9643 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
9644 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0
, 1)),
9645 TREE_VALUE (TREE_OPERAND (arg1
, 1)), 0))
9647 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
9649 if (tanfn
!= NULL_TREE
)
9651 tree tmp
= TREE_OPERAND (arg0
, 1);
9652 tmp
= build_function_call_expr (tanfn
, tmp
);
9653 return fold_build2 (RDIV_EXPR
, type
,
9654 build_real (type
, dconst1
), tmp
);
9658 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9659 NaNs or Infinities. */
9660 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
9661 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
9662 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
9664 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9665 tree arg01
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9667 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
9668 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
9669 && operand_equal_p (arg00
, arg01
, 0))
9671 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
9673 if (cosfn
!= NULL_TREE
)
9674 return build_function_call_expr (cosfn
,
9675 TREE_OPERAND (arg0
, 1));
9679 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9680 NaNs or Infinities. */
9681 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
9682 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
9683 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
9685 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9686 tree arg01
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9688 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
9689 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
9690 && operand_equal_p (arg00
, arg01
, 0))
9692 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
9694 if (cosfn
!= NULL_TREE
)
9696 tree tmp
= TREE_OPERAND (arg0
, 1);
9697 tmp
= build_function_call_expr (cosfn
, tmp
);
9698 return fold_build2 (RDIV_EXPR
, type
,
9699 build_real (type
, dconst1
),
9705 /* Optimize pow(x,c)/x as pow(x,c-1). */
9706 if (fcode0
== BUILT_IN_POW
9707 || fcode0
== BUILT_IN_POWF
9708 || fcode0
== BUILT_IN_POWL
)
9710 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
9711 tree arg01
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0
, 1)));
9712 if (TREE_CODE (arg01
) == REAL_CST
9713 && ! TREE_CONSTANT_OVERFLOW (arg01
)
9714 && operand_equal_p (arg1
, arg00
, 0))
9716 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
9720 c
= TREE_REAL_CST (arg01
);
9721 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
9722 arg
= build_real (type
, c
);
9723 arglist
= build_tree_list (NULL_TREE
, arg
);
9724 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
9725 return build_function_call_expr (powfn
, arglist
);
9729 /* Optimize x/expN(y) into x*expN(-y). */
9730 if (BUILTIN_EXPONENT_P (fcode1
))
9732 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
9733 tree arg
= negate_expr (TREE_VALUE (TREE_OPERAND (arg1
, 1)));
9734 tree arglist
= build_tree_list (NULL_TREE
,
9735 fold_convert (type
, arg
));
9736 arg1
= build_function_call_expr (expfn
, arglist
);
9737 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
9740 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9741 if (fcode1
== BUILT_IN_POW
9742 || fcode1
== BUILT_IN_POWF
9743 || fcode1
== BUILT_IN_POWL
)
9745 tree powfn
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0);
9746 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
9747 tree arg11
= TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1
, 1)));
9748 tree neg11
= fold_convert (type
, negate_expr (arg11
));
9749 tree arglist
= tree_cons(NULL_TREE
, arg10
,
9750 build_tree_list (NULL_TREE
, neg11
));
9751 arg1
= build_function_call_expr (powfn
, arglist
);
9752 return fold_build2 (MULT_EXPR
, type
, arg0
, arg1
);
9757 case TRUNC_DIV_EXPR
:
9758 case FLOOR_DIV_EXPR
:
9759 /* Simplify A / (B << N) where A and B are positive and B is
9760 a power of 2, to A >> (N + log2(B)). */
9761 if (TREE_CODE (arg1
) == LSHIFT_EXPR
9762 && (TYPE_UNSIGNED (type
) || tree_expr_nonnegative_p (arg0
)))
9764 tree sval
= TREE_OPERAND (arg1
, 0);
9765 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
9767 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
9768 unsigned long pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
9770 sh_cnt
= fold_build2 (PLUS_EXPR
, TREE_TYPE (sh_cnt
),
9771 sh_cnt
, build_int_cst (NULL_TREE
, pow2
));
9772 return fold_build2 (RSHIFT_EXPR
, type
,
9773 fold_convert (type
, arg0
), sh_cnt
);
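          /* Worked example: for unsigned A and a power-of-two B,
             A / (4 << n) divides by 2**(n + 2) exactly, so it is
             rewritten as A >> (n + 2); the shift count is N plus
             log2 of B as computed above.  */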
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;

      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (!TYPE_UNSIGNED (type) && !flag_wrapv
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	return fold_build2 (code, type, negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
				       arg1, integer_one_node);
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, arg0),
				  fold_convert (type, mask));
	    }
	}
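      /* Illustrative note (added): for unsigned or provably nonnegative X
	 and a power-of-two divisor, the rewrite corresponds to identities
	 such as
	   x % 8u          ==  x & 7u
	   x % (4u << n)   ==  x & ((4u << n) - 1)
	 replacing a division by a cheap bitwise AND.  */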
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_CONSTANT_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return build_int_cst (type, 0);
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			      build_int_cst (type, low));
	}
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree arg00, lshift;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (NULL_TREE,
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
	}
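      /* Illustrative note (added): rotating left by a constant c is the
	 same as rotating right by (width - c); for a 32-bit type, e.g.,
	   rotl (x, 5)   ==   rotr (x, 27)
	 so later code only has to recognize one rotate direction.  */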
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);

      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2 (code, type, arg0, tem);
	}
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}
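      /* Illustrative note (added): the canonical instance is
	   (A || B) && (A || C)   ->   A || (B && C)
	 with the side-effect restrictions above ensuring that
	 re-associating the operands cannot change which sub-expressions
	 get evaluated.  */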
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, arg0);

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
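      /* Illustrative note (added): for a boolean-typed value b these four
	 rewrites correspond to
	   b != 0  ->  b        b == 1  ->  b
	   b != 1  ->  !b       b == 0  ->  !b
	 so equality tests against boolean constants disappear.  */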
      /* ~a != C becomes a != ~C where C is a constant.  Likewise for ==.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
					 arg1));

      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	return constant_boolean_node (code != EQ_EXPR, type);
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					   arg01, TREE_OPERAND (arg00, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					   arg00, TREE_OPERAND (arg01, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
		{
		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2 (code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand (type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    {
	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
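      /* Illustrative note (added): because strlen returns an unsigned
	 value, a zero test degenerates to looking at the first byte,
	   strlen (p) == 0   ->   *p == 0
	   strlen (p) != 0   ->   *p != 0
	 which avoids the library call when only emptiness is tested.  */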
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = lang_hooks.types.signed_type (itype);
		  arg00 = fold_convert (itype, arg00);
		}
	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  type, arg00, build_int_cst (itype, 0));
	    }
	}
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
			    build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
					 TREE_OPERAND (arg0, 1), arg1));
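      /* Illustrative note (added): with integer constants this is simply
	   (x ^ 5) == 12   ->   x == (5 ^ 12)   i.e.   x == 9
	 replacing a run-time XOR with one folded at compile time.  */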
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
			     TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
			      type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
			     arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			      tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}

      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
		  && !(flag_wrapv || flag_trapv))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);
	    }
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  if (code == GE_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	  if (code == LT_EXPR)
	    {
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));
	    }
	}
10709 /* Comparisons with the highest or lowest possible integer of
10710 the specified size will have known values. */
10712 int width
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1
)));
10714 if (TREE_CODE (arg1
) == INTEGER_CST
10715 && ! TREE_CONSTANT_OVERFLOW (arg1
)
10716 && width
<= 2 * HOST_BITS_PER_WIDE_INT
10717 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10718 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
10720 HOST_WIDE_INT signed_max_hi
;
10721 unsigned HOST_WIDE_INT signed_max_lo
;
10722 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
10724 if (width
<= HOST_BITS_PER_WIDE_INT
)
10726 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
10731 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
10733 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
10739 max_lo
= signed_max_lo
;
10740 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
10746 width
-= HOST_BITS_PER_WIDE_INT
;
10747 signed_max_lo
= -1;
10748 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
10753 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
10755 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
10760 max_hi
= signed_max_hi
;
10761 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
10765 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
10766 && TREE_INT_CST_LOW (arg1
) == max_lo
)
10770 return omit_one_operand (type
, integer_zero_node
, arg0
);
10773 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
10776 return omit_one_operand (type
, integer_one_node
, arg0
);
10779 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
10781 /* The GE_EXPR and LT_EXPR cases above are not normally
10782 reached because of previous transformations. */
10787 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
10789 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
10793 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
10794 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
10796 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
10797 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
10801 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
10803 && TREE_INT_CST_LOW (arg1
) == min_lo
)
10807 return omit_one_operand (type
, integer_zero_node
, arg0
);
10810 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
10813 return omit_one_operand (type
, integer_one_node
, arg0
);
10816 return fold_build2 (NE_EXPR
, type
, op0
, op1
);
10821 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
10823 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
10827 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
10828 return fold_build2 (NE_EXPR
, type
, arg0
, arg1
);
10830 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
10831 return fold_build2 (EQ_EXPR
, type
, arg0
, arg1
);
10836 else if (!in_gimple_form
10837 && TREE_INT_CST_HIGH (arg1
) == signed_max_hi
10838 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
10839 && TYPE_UNSIGNED (TREE_TYPE (arg1
))
10840 /* signed_type does not work on pointer types. */
10841 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
10843 /* The following case also applies to X < signed_max+1
10844 and X >= signed_max+1 because previous transformations. */
10845 if (code
== LE_EXPR
|| code
== GT_EXPR
)
10848 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
10849 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
10850 return fold_build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
10851 type
, fold_convert (st0
, arg0
),
10852 build_int_cst (st1
, 0));
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      if (code == GE_EXPR
	  && tree_expr_nonnegative_p (arg0)
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1))))
	return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      if (code == LT_EXPR
	  && tree_expr_nonnegative_p (arg0)
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return omit_one_operand (type, integer_zero_node, arg0);
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));
10901 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
10902 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
10903 && (TREE_CODE (arg1
) == NOP_EXPR
10904 || TREE_CODE (arg1
) == CONVERT_EXPR
)
10905 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
10906 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
10908 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
10909 fold_convert (TREE_TYPE (arg0
),
10910 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
10911 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
10913 build_int_cst (TREE_TYPE (arg0
), 0));
    case UNORDERED_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	{
	  /* Don't let (0, 0) be null pointer constant.  */
	  tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				     : fold_convert (type, arg1);
	  return pedantic_non_lvalue (tem);
	}

      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
10999 /* An ASSERT_EXPR should never be passed to fold_binary. */
11000 gcc_unreachable ();
11004 } /* switch (code) */
11007 /* Callback for walk_tree, looking for LABEL_EXPR.
11008 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11009 Do not check the sub-tree of GOTO_EXPR. */
11012 contains_label_1 (tree
*tp
,
11013 int *walk_subtrees
,
11014 void *data ATTRIBUTE_UNUSED
)
11016 switch (TREE_CODE (*tp
))
11021 *walk_subtrees
= 0;
11028 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11029 accessible from outside the sub-tree. Returns NULL_TREE if no
11030 addressable label is found. */
11033 contains_label_p (tree st
)
11035 return (walk_tree (&st
, contains_label_1
, NULL
, NULL
) != NULL_TREE
);
11038 /* Fold a ternary expression of code CODE and type TYPE with operands
11039 OP0, OP1, and OP2. Return the folded expression if folding is
11040 successful. Otherwise, return NULL_TREE. */
11043 fold_ternary (enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
11046 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
11047 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11049 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
11050 && TREE_CODE_LENGTH (code
) == 3);
11052 /* Strip any conversions that don't change the mode. This is safe
11053 for every expression, except for a comparison expression because
11054 its signedness is derived from its operands. So, in the latter
11055 case, only strip conversions that don't change the signedness.
11057 Note that this is done as an internal manipulation within the
11058 constant folder, in order to find the simplest representation of
11059 the arguments so that their form can be studied. In any cases,
11060 the appropriate type conversions should be put back in the tree
11061 that will get out of the constant folder. */
11076 case COMPONENT_REF
:
11077 if (TREE_CODE (arg0
) == CONSTRUCTOR
11078 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
11080 unsigned HOST_WIDE_INT idx
;
11082 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
11089 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11090 so all simple results must be passed through pedantic_non_lvalue. */
11091 if (TREE_CODE (arg0
) == INTEGER_CST
)
11093 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
11094 tem
= integer_zerop (arg0
) ? op2
: op1
;
11095 /* Only optimize constant conditions when the selected branch
11096 has the same type as the COND_EXPR. This avoids optimizing
11097 away "c ? x : throw", where the throw has a void type.
11098 Avoid throwing away that operand which contains label. */
11099 if ((!TREE_SIDE_EFFECTS (unused_op
)
11100 || !contains_label_p (unused_op
))
11101 && (! VOID_TYPE_P (TREE_TYPE (tem
))
11102 || VOID_TYPE_P (type
)))
11103 return pedantic_non_lvalue (tem
);
11106 if (operand_equal_p (arg1
, op2
, 0))
11107 return pedantic_omit_one_operand (type
, arg1
, arg0
);
11109 /* If we have A op B ? A : C, we may be able to convert this to a
11110 simpler expression, depending on the operation and the values
11111 of B and C. Signed zeros prevent all of these transformations,
11112 for reasons given above each one.
11114 Also try swapping the arguments and inverting the conditional. */
11115 if (COMPARISON_CLASS_P (arg0
)
11116 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11117 arg1
, TREE_OPERAND (arg0
, 1))
11118 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
11120 tem
= fold_cond_expr_with_comparison (type
, arg0
, op1
, op2
);
11125 if (COMPARISON_CLASS_P (arg0
)
11126 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
11128 TREE_OPERAND (arg0
, 1))
11129 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
11131 tem
= fold_truth_not_expr (arg0
);
11132 if (tem
&& COMPARISON_CLASS_P (tem
))
11134 tem
= fold_cond_expr_with_comparison (type
, tem
, op2
, op1
);
11140 /* If the second operand is simpler than the third, swap them
11141 since that produces better jump optimization results. */
11142 if (truth_value_p (TREE_CODE (arg0
))
11143 && tree_swap_operands_p (op1
, op2
, false))
11145 /* See if this can be inverted. If it can't, possibly because
11146 it was a floating-point inequality comparison, don't do
11148 tem
= fold_truth_not_expr (arg0
);
11150 return fold_build3 (code
, type
, tem
, op2
, op1
);
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
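      /* Illustrative note (added): for a truth-valued condition these two
	 rewrites correspond to
	   a ? 1 : 0   ->   a
	   a ? 0 : 1   ->   !a
	 removing the conditional expression entirely.  */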
11171 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11172 if (TREE_CODE (arg0
) == LT_EXPR
11173 && integer_zerop (TREE_OPERAND (arg0
, 1))
11174 && integer_zerop (op2
)
11175 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
11176 return fold_convert (type
,
11177 fold_build2 (BIT_AND_EXPR
,
11178 TREE_TYPE (tem
), tem
,
11179 fold_convert (TREE_TYPE (tem
), arg1
)));
11181 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11182 already handled above. */
11183 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11184 && integer_onep (TREE_OPERAND (arg0
, 1))
11185 && integer_zerop (op2
)
11186 && integer_pow2p (arg1
))
11188 tree tem
= TREE_OPERAND (arg0
, 0);
11190 if (TREE_CODE (tem
) == RSHIFT_EXPR
11191 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11192 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
11193 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
11194 return fold_build2 (BIT_AND_EXPR
, type
,
11195 TREE_OPERAND (tem
, 0), arg1
);
11198 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11199 is probably obsolete because the first operand should be a
11200 truth value (that's why we have the two cases above), but let's
11201 leave it in until we can confirm this for all front-ends. */
11202 if (integer_zerop (op2
)
11203 && TREE_CODE (arg0
) == NE_EXPR
11204 && integer_zerop (TREE_OPERAND (arg0
, 1))
11205 && integer_pow2p (arg1
)
11206 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11207 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11208 arg1
, OEP_ONLY_CONST
))
11209 return pedantic_non_lvalue (fold_convert (type
,
11210 TREE_OPERAND (arg0
, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (arg0);
	  if (tem)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);
11257 /* Check for a built-in function. */
11258 if (TREE_CODE (op0
) == ADDR_EXPR
11259 && TREE_CODE (TREE_OPERAND (op0
, 0)) == FUNCTION_DECL
11260 && DECL_BUILT_IN (TREE_OPERAND (op0
, 0)))
11261 return fold_builtin (TREE_OPERAND (op0
, 0), op1
, false);
11264 case BIT_FIELD_REF
:
11265 if (TREE_CODE (arg0
) == VECTOR_CST
11266 && type
== TREE_TYPE (TREE_TYPE (arg0
))
11267 && host_integerp (arg1
, 1)
11268 && host_integerp (op2
, 1))
11270 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
11271 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
11274 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
11275 && (idx
% width
) == 0
11276 && (idx
= idx
/ width
)
11277 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
11279 tree elements
= TREE_VECTOR_CST_ELTS (arg0
);
11280 while (idx
-- > 0 && elements
)
11281 elements
= TREE_CHAIN (elements
);
11283 return TREE_VALUE (elements
);
11285 return fold_convert (type
, integer_zero_node
);
11292 } /* switch (code) */
11295 /* Perform constant folding and related simplification of EXPR.
11296 The related simplifications include x*1 => x, x*0 => 0, etc.,
11297 and application of the associative law.
11298 NOP_EXPR conversions may be removed freely (as long as we
11299 are careful not to change the type of the overall expression).
11300 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11301 but we can constant-fold them if they have constant operands. */
11303 #ifdef ENABLE_FOLD_CHECKING
11304 # define fold(x) fold_1 (x)
11305 static tree
fold_1 (tree
);
11311 const tree t
= expr
;
11312 enum tree_code code
= TREE_CODE (t
);
11313 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
11316 /* Return right away if a constant. */
11317 if (kind
== tcc_constant
)
11320 if (IS_EXPR_CODE_CLASS (kind
))
11322 tree type
= TREE_TYPE (t
);
11323 tree op0
, op1
, op2
;
11325 switch (TREE_CODE_LENGTH (code
))
11328 op0
= TREE_OPERAND (t
, 0);
11329 tem
= fold_unary (code
, type
, op0
);
11330 return tem
? tem
: expr
;
11332 op0
= TREE_OPERAND (t
, 0);
11333 op1
= TREE_OPERAND (t
, 1);
11334 tem
= fold_binary (code
, type
, op0
, op1
);
11335 return tem
? tem
: expr
;
11337 op0
= TREE_OPERAND (t
, 0);
11338 op1
= TREE_OPERAND (t
, 1);
11339 op2
= TREE_OPERAND (t
, 2);
11340 tem
= fold_ternary (code
, type
, op0
, op1
, op2
);
11341 return tem
? tem
: expr
;
11350 return fold (DECL_INITIAL (t
));
11354 } /* switch (code) */
11357 #ifdef ENABLE_FOLD_CHECKING
11360 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
11361 static void fold_check_failed (tree
, tree
);
11362 void print_fold_checksum (tree
);
11364 /* When --enable-checking=fold, compute a digest of expr before
11365 and after actual fold call to see if fold did not accidentally
11366 change original expr. */
11372 struct md5_ctx ctx
;
11373 unsigned char checksum_before
[16], checksum_after
[16];
11376 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
11377 md5_init_ctx (&ctx
);
11378 fold_checksum_tree (expr
, &ctx
, ht
);
11379 md5_finish_ctx (&ctx
, checksum_before
);
11382 ret
= fold_1 (expr
);
11384 md5_init_ctx (&ctx
);
11385 fold_checksum_tree (expr
, &ctx
, ht
);
11386 md5_finish_ctx (&ctx
, checksum_after
);
11389 if (memcmp (checksum_before
, checksum_after
, 16))
11390 fold_check_failed (expr
, ret
);
11396 print_fold_checksum (tree expr
)
11398 struct md5_ctx ctx
;
11399 unsigned char checksum
[16], cnt
;
11402 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
11403 md5_init_ctx (&ctx
);
11404 fold_checksum_tree (expr
, &ctx
, ht
);
11405 md5_finish_ctx (&ctx
, checksum
);
11407 for (cnt
= 0; cnt
< 16; ++cnt
)
11408 fprintf (stderr
, "%02x", checksum
[cnt
]);
11409 putc ('\n', stderr
);
11413 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
11415 internal_error ("fold check: original tree changed by fold");
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)

  enum tree_code code;
  struct tree_function_decl buf;

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));

  slot = htab_find_slot (ht, expr, INSERT);

  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))

      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);

  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))

      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))

	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;

  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))

      md5_process_bytes (TREE_STRING_POINTER (expr),
			 TREE_STRING_LENGTH (expr), ctx);

      fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
      fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);

      fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);

    case tcc_exceptional:
      fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
      fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
      expr = TREE_CHAIN (expr);
      goto recursive_label;

      for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);

    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);

    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))

	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))

	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);

      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))

	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);

      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
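/* Illustrative sketch (not part of GCC): fold_checksum_tree above feeds every
   reachable sub-tree into the digest but records visited nodes in a hash
   table so shared nodes and cycles are hashed only once.  The standalone
   example below shows the same idea for a toy node graph; the node layout
   and the linear-probing "seen" table are hypothetical simplifications.  */
#if 0
#include <stddef.h>
#include <stdint.h>

struct toy_node
{
  int value;
  struct toy_node *left, *right;   /* May alias or form cycles.  */
};

#define SEEN_SIZE 64               /* Power of two for cheap masking.  */

/* Insert P into the zero-initialized SEEN table; return 0 if already there.  */
static int
seen_insert (const void **seen, const void *p)
{
  size_t i = ((uintptr_t) p >> 3) & (SEEN_SIZE - 1);
  while (seen[i] != NULL)
    {
      if (seen[i] == p)
	return 0;                  /* Already visited.  */
      i = (i + 1) & (SEEN_SIZE - 1);
    }
  seen[i] = p;
  return 1;
}

static void
toy_checksum_node (const struct toy_node *n, uint64_t *h, const void **seen)
{
  if (n == NULL || !seen_insert (seen, n))
    return;                        /* Stop on NULL or already-seen nodes.  */
  *h = (*h ^ (uint64_t) n->value) * 1099511628211ULL;
  toy_checksum_node (n->left, h, seen);
  toy_checksum_node (n->right, h, seen);
}
#endif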
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)

#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1 MEM_STAT_DECL)

#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 MEM_STAT_DECL)

#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
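/* Illustrative sketch (not part of GCC): the fold_buildN_initializer
   functions wrap an ordinary fold_buildN call between START_FOLD_INIT and
   END_FOLD_INIT, which save, clear and later restore the global flags that
   would otherwise stop fold from simplifying trapping operations.  The toy
   globals, macros and callback below show the same save/clear/restore
   pattern with hypothetical names.  */
#if 0
static int toy_trapping_math = 1;
static int toy_folding_initializer = 0;

#define TOY_START_INIT \
  int saved_trapping = toy_trapping_math; \
  int saved_folding = toy_folding_initializer; \
  toy_trapping_math = 0; \
  toy_folding_initializer = 1;

#define TOY_END_INIT \
  toy_trapping_math = saved_trapping; \
  toy_folding_initializer = saved_folding;

static int
toy_fold_initializer (int (*fold_fn) (int), int operand)
{
  int result;
  TOY_START_INIT;
  result = fold_fn (operand);      /* Folds with trap flags cleared.  */
  TOY_END_INIT;
  return result;
}
#endif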
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
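/* Illustrative sketch (not part of GCC): multiple_of_p below reasons
   structurally, e.g. a product is a multiple of BOTTOM if either factor is,
   while a sum is one only if both addends are.  The toy recursive checker
   below applies the same rules to a hypothetical constant/plus/mult
   expression struct.  */
#if 0
enum toy_code { TOY_CONST, TOY_PLUS, TOY_MULT };

struct toy_expr
{
  enum toy_code code;
  long value;                        /* Used when code == TOY_CONST.  */
  const struct toy_expr *op0, *op1;  /* Used for TOY_PLUS / TOY_MULT.  */
};

static int
toy_multiple_of_p (const struct toy_expr *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CONST:
      return bottom != 0 && top->value % bottom == 0;
    case TOY_MULT:
      /* A product is a multiple if either factor is.  */
      return toy_multiple_of_p (top->op0, bottom)
	     || toy_multiple_of_p (top->op1, bottom);
    case TOY_PLUS:
      /* A sum is a multiple only if both addends are.  */
      return toy_multiple_of_p (top->op0, bottom)
	     && toy_multiple_of_p (top->op1, bottom);
    }
  return 0;
}
#endif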
multiple_of_p (tree type, tree top, tree bottom)

  if (operand_equal_p (top, bottom, 0))

  if (TREE_CODE (type) != INTEGER_TYPE)

  switch (TREE_CODE (top))

      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))

      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);

      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))

      /* .. fall through ...  */

      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))

      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
/* Return true if `t' is known to be non-negative.  */

tree_expr_nonnegative_p (tree t)

  if (t == error_mark_node)

  if (TYPE_UNSIGNED (TREE_TYPE (t)))

  switch (TREE_CODE (t))

      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))

      return tree_int_cst_sgn (t) >= 0;

      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)

	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))

	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));

      if (FLOAT_TYPE_P (TREE_TYPE (t)))

	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))

	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)

	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));

      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)

	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)

		if (TYPE_UNSIGNED (inner_type))

		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

	else if (TREE_CODE (outer_type) == INTEGER_TYPE)

	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);

	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	if (TREE_CODE (t) == BIND_EXPR)
	  t = expr_last (BIND_EXPR_BODY (t));
	else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		 || TREE_CODE (t) == TRY_CATCH_EXPR)
	  t = expr_last (TREE_OPERAND (t, 0));
	else if (TREE_CODE (t) == STATEMENT_LIST)

	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))

	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))

	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_POW):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

      /* ... fall through ...  */

      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */

  /* We don't know sign of `t', so be conservative and return false.  */
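/* Illustrative sketch (not part of GCC): the PLUS_EXPR case above accepts
   zero-extended operands whose precision is at least two bits narrower than
   the result, because the sum then stays below the sign bit and can never be
   negative.  A standalone arithmetic check of that bound:  */
#if 0
#include <assert.h>
#include <stdint.h>

static int32_t
toy_sum_of_zero_extended (uint8_t a, uint8_t b)
{
  /* a, b < 2^8, so a + b < 2^9, and 9 is well below the 31 value bits of
     int32_t: the result cannot reach the sign bit.  */
  int32_t sum = (int32_t) a + (int32_t) b;
  assert (sum >= 0);
  return sum;
}
#endif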
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

tree_expr_nonzero_p (tree t)

  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))

  switch (TREE_CODE (t))

      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

      if (!TYPE_UNSIGNED (type) && !flag_wrapv)

	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))

	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));

	tree base = get_base_address (TREE_OPERAND (t, 0));

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))

      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))

    case COMPOUND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

      return alloca_call_p (t);
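/* Illustrative sketch (not part of GCC): the PLUS_EXPR case above relies on
   both operands being non-negative (with overflow excluded), so the sum is
   nonzero whenever either operand is.  A standalone statement of the same
   fact for small unsigned values:  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
toy_nonzero_sum (uint16_t a, uint16_t b)
{
  uint32_t sum = (uint32_t) a + (uint32_t) b;   /* Cannot wrap in 32 bits.  */
  if (a != 0 || b != 0)
    assert (sum != 0);                          /* Nonzero + nonnegative.  */
}
#endif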
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE and OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

fold_read_from_constant_string (tree exp)

  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)

      tree exp1 = TREE_OPERAND (exp, 0);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);

	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
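/* Illustrative sketch (not part of GCC): the comment in
   fold_read_from_constant_string warns that subtracting a narrow (QImode)
   lower bound before widening turns INDEX - 1 into INDEX + 255.  The
   standalone example below reproduces that wraparound with unsigned char
   arithmetic and shows the fix of widening to the index type first.  */
#if 0
#include <assert.h>
#include <stddef.h>

static void
toy_low_bound_pitfall (void)
{
  unsigned char low_bound = 1;
  size_t index = 5;

  /* Wrong: negate in the narrow type first; (unsigned char) -1 is 255.  */
  size_t bad = index + (unsigned char) -low_bound;
  /* Right: widen the lower bound to the index type before subtracting.  */
  size_t good = index - (size_t) low_bound;

  assert (bad == 260);
  assert (good == 4);
}
#endif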
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

fold_negate_const (tree arg0, tree type)

  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))

	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));

      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));

      gcc_unreachable ();
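/* Illustrative sketch (not part of GCC): fold_negate_const records overflow
   from neg_double because negating the most negative two's-complement value
   does not fit in the same signed type.  A standalone check of that
   condition with hypothetical helper names:  */
#if 0
#include <limits.h>

static int
toy_negate_overflows (int value)
{
  /* In two's complement, -INT_MIN is not representable as an int.  */
  return value == INT_MIN;
}

static int
toy_checked_negate (int value, int *overflow)
{
  *overflow = toy_negate_overflows (value);
  return *overflow ? value : -value;   /* Leave INT_MIN unchanged, flag it.  */
}
#endif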
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

fold_abs_const (tree arg0, tree type)

  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))

      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))

      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))

      /* If the value is negative, then the absolute value is
	 its negation.  */

	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));

      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));

      gcc_unreachable ();
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

fold_not_const (tree arg0, tree type)

  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)

  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)

      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))

	    case UNORDERED_EXPR:

	      if (flag_trapping_math)

	      gcc_unreachable ();

	  return constant_boolean_node (result, type);

      return constant_boolean_node (real_compare (code, c0, c1), type);

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)

      code = swap_tree_comparison (code);

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  if (code == NE_EXPR || code == GE_EXPR)

      code = invert_tree_comparison (code, false);

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)

      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);

  return constant_boolean_node (result, type);
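/* Illustrative sketch (not part of GCC): as the comment above explains, the
   integer branch of fold_relational_const only evaluates EQ and LT and
   derives GT, GE, LE and NE by swapping the operands and/or inverting the
   result.  A standalone version of that reduction for plain longs:  */
#if 0
enum toy_cmp { TOY_EQ, TOY_NE, TOY_LT, TOY_LE, TOY_GT, TOY_GE };

static int
toy_compare (enum toy_cmp code, long a, long b)
{
  int invert = 0, result;
  long tmp;

  if (code == TOY_LE || code == TOY_GT)
    {                         /* LE/GT: swap the operands, compute GE/LT.  */
      tmp = a; a = b; b = tmp;
      code = (code == TOY_LE) ? TOY_GE : TOY_LT;
    }
  if (code == TOY_NE || code == TOY_GE)
    {                         /* NE/GE: compute EQ/LT and invert.  */
      invert = 1;
      code = (code == TOY_NE) ? TOY_EQ : TOY_LT;
    }

  result = (code == TOY_EQ) ? (a == b) : (a < b);
  return invert ? !result : result;
}
#endif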
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

fold_build_cleanup_point_expr (tree type, tree expr)

  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either doesn't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)

      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))

      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))

  return build1 (CLEANUP_POINT_EXPR, type, expr);
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

build_fold_addr_expr_with_type (tree t, tree ptrtype)

  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)

      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);

      TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);

build_fold_addr_expr (tree t)

  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

fold_indirect_ref_1 (tree type, tree op0)

  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))

  if (TREE_CODE (sub) == ADDR_EXPR)

      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p; make sure to handle *&"str"[cst] here.  */
      if (type == optype)

	  tree fop = fold_read_from_constant_string (op);

      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))

	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);

      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)

      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))

	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))

      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

build_fold_indirect_ref (tree t)

  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  return build1 (INDIRECT_REF, type, t);

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

fold_indirect_ref (tree t)

  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

fold_ignored_result (tree t)

  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

    switch (TREE_CODE_CLASS (TREE_CODE (t)))

	t = TREE_OPERAND (t, 0);

      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);

      case tcc_expression:
	switch (TREE_CODE (t))

	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))

	    t = TREE_OPERAND (t, 0);

	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))

	    t = TREE_OPERAND (t, 0);
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

round_up (tree value, int divisor)

  tree div = NULL_TREE;

  gcc_assert (divisor > 0);

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)

      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);

      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);

/* Likewise, but round down.  */

round_down (tree value, int divisor)

  tree div = NULL_TREE;

  gcc_assert (divisor > 0);

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)

      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);

      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
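/* Illustrative sketch (not part of GCC): when DIVISOR is a power of two,
   round_up and round_down reduce to the classic mask tricks
   (value + divisor - 1) & -divisor and value & -divisor.  A standalone
   version for unsigned sizes, assuming divisor is a nonzero power of two:  */
#if 0
#include <stddef.h>

static size_t
toy_round_up_pow2 (size_t value, size_t divisor)
{
  /* divisor must be a power of two, i.e. divisor == (divisor & -divisor).  */
  return (value + divisor - 1) & ~(divisor - 1);
}

static size_t
toy_round_down_pow2 (size_t value, size_t divisor)
{
  return value & ~(divisor - 1);
}
#endif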
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)

  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)

      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr (core);

      *poffset = NULL_TREE;

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)

  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))

  if (toffset1 && toffset2)

      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))

      *diff = int_cst_value (tdiff);

  else if (toffset1 || toffset2)

      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
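/* Illustrative sketch (not part of GCC): ptr_difference_const succeeds when
   both addresses decompose to the same core object plus constant byte
   offsets, so the difference is just the offset difference.  A standalone
   analogue using array element addresses:  */
#if 0
#include <assert.h>
#include <stddef.h>

static void
toy_ptr_difference (void)
{
  int buf[16];
  int *e1 = &buf[9];
  int *e2 = &buf[2];
  /* Same core object (buf), constant element offsets 9 and 2:
     the difference is a compile-time-style constant, 7 elements.  */
  ptrdiff_t diff = e1 - e2;
  assert (diff == 7);
}
#endif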
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   was possible.  */

fold_strip_sign_ops (tree exp)

  switch (TREE_CODE (exp))

      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))

      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));