/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
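
/* Added illustration (editorial, not from the original source): the
   encoding packs LT, EQ and GT into bits 0-2 and "unordered" into bit 3,
   so a compound predicate's code is the bitwise combination of its
   parts, e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.  */
#if 0
/* (x < y) && (x <= y) collapses to x < y: AND the bit codes.  */
int and_code = COMPCODE_LT & COMPCODE_LE;	/* == COMPCODE_LT */
/* (x < y) || (x == y) collapses to x <= y: OR the bit codes.  */
int or_code = COMPCODE_LT | COMPCODE_EQ;	/* == COMPCODE_LE */
#endif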
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
		    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
				   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
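
/* Added illustration (editorial, not from the original source): a sketch
   of the macro in action.  add_double below performs sums in unsigned
   arithmetic precisely so that wraparound is well defined.  */
#if 0
HOST_WIDE_INT a = (HOST_WIDE_INT) (~(unsigned HOST_WIDE_INT) 0 >> 1); /* max */
HOST_WIDE_INT b = 1;
HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a + b);
/* a and b agree in sign, sum differs, so this is nonzero.  */
int overflowed = OVERFLOW_SUM_SIGN (a, b, sum);
#endif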
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
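
/* Added illustration (editorial): with a 64-bit HOST_WIDE_INT, BASE is
   2^32, so 0x123456789ABCDEF0 has HIGHPART 0x12345678 and LOWPART
   0x9ABCDEF0, and the word equals LOWPART + HIGHPART * BASE.  A
   hypothetical round trip through the 4-digit representation below:  */
#if 0
HOST_WIDE_INT words[4];
unsigned HOST_WIDE_INT low = 0x9ABCDEF0;
HOST_WIDE_INT hi = 0x12345678;
encode (words, low, hi);	/* four half-width digits, least significant first */
decode (words, &low, &hi);	/* recovers the original (low, hi) pair */
#endif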
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOW if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
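
/* Added illustration (editorial): forcing 255 into an 8-bit signed type
   clears the bits beyond the precision and then sign extends from bit 7,
   yielding the constant -1 without flagging overflow (hypothetical
   call):  */
#if 0
tree t = build_int_cst_wide (signed_char_type_node, 255, 0);
t = force_fit_type (t, 0, false, false);	/* now the two's complement -1 */
#endif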
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
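
/* Added illustration (editorial): the expression (l < l1) above recovers
   the carry out of the low word, because an unsigned sum wraps below its
   first operand exactly when a carry occurred.  A stand-alone sketch of
   the same trick on 32-bit halves (hypothetical helper, not a GCC API):  */
#if 0
#include <stdint.h>
static void
add_u64_halves (uint32_t l1, uint32_t h1, uint32_t l2, uint32_t h2,
		uint32_t *lv, uint32_t *hv)
{
  uint32_t l = l1 + l2;		/* may wrap */
  *lv = l;
  *hv = h1 + h2 + (l < l1);	/* (l < l1) is the carry bit */
}
#endif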
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
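
/* Added note (editorial): the overflow test inspects prod[4..7], the
   upper doubleword of the full 8-digit product, after compensating for
   the signed reading of negative operands.  A signed product fits the
   low doubleword only when that upper half is a pure sign extension of
   it -- all-zero bits for a nonnegative *hv, all-one bits for a negative
   *hv -- which is exactly what the final ?: expression checks.  */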
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
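
/* Added note (editorial): the double shift above,
   (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1), stands in for a
   single shift by (HOST_BITS_PER_WIDE_INT - count) so that count == 0
   never requests a shift by the full word width, which C leaves
   undefined.  rshift_double below uses the mirror-image idiom with <<.  */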
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra element used in scaling */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
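
/* Worked example (editorial addition): dividing -7 by 2 leaves trial
   quotient -3 and remainder -1.  TRUNC_DIV_EXPR keeps -3;
   FLOOR_DIV_EXPR rounds toward negative infinity, giving -4;
   CEIL_DIV_EXPR keeps -3; ROUND_DIV_EXPR compares 2*|rem| = 2 against
   |den| = 2 and, since 2*|rem| >= |den|, steps the quotient away from
   zero to -4.  The true remainder is then recomputed from the adjusted
   quotient, e.g. 1 for the FLOOR case since -7 - (-4 * 2) == 1.  */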
/* Return true if built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 0)));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 1)));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (t),
					   TREE_OPERAND (t, 1),
					   TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 0),
					       negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       negate_expr (tem),
					       TREE_OPERAND (t, 1))));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
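
/* Added illustration (editorial): for IN == x + 3 and CODE == PLUS_EXPR,
   the literal 3 lands in *LITP and the variable part x is returned, with
   *CONP and *MINUS_LITP left null (hypothetical call; IN is assumed to
   hold that tree):  */
#if 0
tree con, lit, minus_lit;
tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
#endif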
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	}
      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
		       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
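
/* Added illustration (editorial): folding 7 % 3 over integer constants
   takes the single-word TRUNC_MOD_EXPR shortcut above and yields a
   fresh INTEGER_CST of value 1 (hypothetical call):  */
#if 0
tree t = int_const_binop (TRUNC_MOD_EXPR,
			  build_int_cst (integer_type_node, 7),
			  build_int_cst (integer_type_node, 3), 0);
#endif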
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
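
/* Added illustration (editorial): a typical middle-end use; both
   operands are sizetype constants, so the fast constant path above
   applies (hypothetical call):  */
#if 0
tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));	/* 12 */
#endif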
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1));

	  t = force_fit_type (t,
			      /* Don't set the overflow when
				 converting a pointer  */
			      !POINTER_TYPE_P (TREE_TYPE (arg1)),
			      (TREE_INT_CST_HIGH (arg1) < 0
			       && (TYPE_UNSIGNED (type)
				   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			      | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* The following code implements the floating point to integer
	     conversion rules required by the Java Language Specification,
	     that IEEE NaNs are mapped to zero and values that overflow
	     the target precision saturate, i.e. values greater than
	     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
	     are mapped to INT_MIN.  These semantics are allowed by the
	     C and C++ standards that simply state that the behavior of
	     FP-to-integer conversion is unspecified upon overflow.  */

	  int overflow = 0;
	  HOST_WIDE_INT high, low;
	  REAL_VALUE_TYPE r;
	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

	  switch (code)
	    {
	    case FIX_TRUNC_EXPR:
	      real_trunc (&r, VOIDmode, &x);
	      break;

	    case FIX_CEIL_EXPR:
	      real_ceil (&r, VOIDmode, &x);
	      break;

	    case FIX_FLOOR_EXPR:
	      real_floor (&r, VOIDmode, &x);
	      break;

	    case FIX_ROUND_EXPR:
	      real_round (&r, VOIDmode, &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* If R is NaN, return zero and show we have an overflow.  */
	  if (REAL_VALUE_ISNAN (r))
	    {
	      overflow = 1;
	      high = 0;
	      low = 0;
	    }

	  /* See if R is less than the lower bound or greater than the
	     upper bound.  */

	  if (! overflow)
	    {
	      tree lt = TYPE_MIN_VALUE (type);
	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
	      if (REAL_VALUES_LESS (r, l))
		{
		  overflow = 1;
		  high = TREE_INT_CST_HIGH (lt);
		  low = TREE_INT_CST_LOW (lt);
		}
	    }

	  if (! overflow)
	    {
	      tree ut = TYPE_MAX_VALUE (type);
	      if (ut)
		{
		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
		  if (REAL_VALUES_LESS (u, r))
		    {
		      overflow = 1;
		      high = TREE_INT_CST_HIGH (ut);
		      low = TREE_INT_CST_LOW (ut);
		    }
		}
	    }

	  if (! overflow)
	    REAL_VALUE_TO_INT (&low, &high, r);

	  t = build_int_cst_wide (type, low, high);

	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  return NULL_TREE;
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold (build1 (NOP_EXPR, type, arg));

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold (build1 (FLOAT_EXPR, type, arg));

	case REAL_TYPE:
	  return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			       type, arg));

	case COMPLEX_TYPE:
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	      }

	    arg = save_expr (arg);
	    rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	    ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold (build1 (NOP_EXPR, type, arg));

    case VOID_TYPE:
      return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));

    default:
      gcc_unreachable ();
    }
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return x;
  }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
			 truth_type, ll_arg, lr_arg));
}
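
/* Added illustration (editorial): for integer operands (no NaNs),
   combining (x < y) || (x == y) ORs COMPCODE_LT with COMPCODE_EQ to
   obtain COMPCODE_LE, so the pair folds to x <= y; combining (x < y)
   && (x > y) ANDs disjoint codes down to COMPCODE_FALSE, which
   constant_boolean_node turns into the constant false.  */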
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
2319 operand_equal_p (tree arg0
, tree arg1
, unsigned int flags
)
2321 /* If one is specified and the other isn't, they aren't equal and if
2322 neither is specified, they are.
2324 ??? This is temporary and is meant only to handle the cases of the
2325 optional operands for COMPONENT_REF and ARRAY_REF. */
2326 if ((arg0
&& !arg1
) || (!arg0
&& arg1
))
2328 else if (!arg0
&& !arg1
)
2330 /* If either is ERROR_MARK, they aren't equal. */
2331 else if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
)
2334 /* If both types don't have the same signedness, then we can't consider
2335 them equal. We must check this before the STRIP_NOPS calls
2336 because they may change the signedness of the arguments. */
2337 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2343 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2344 /* This is needed for conversions and for COMPONENT_REF.
2345 Might as well play it safe and always test this. */
2346 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2347 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2348 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2351 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2352 We don't care about side effects in that case because the SAVE_EXPR
2353 takes care of that for us. In all other cases, two expressions are
2354 equal if they have no side effects. If we have two identical
2355 expressions with side effects that should be treated the same due
2356 to the only side effects being identical SAVE_EXPR's, that will
2357 be detected in the recursive calls below. */
2358 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2359 && (TREE_CODE (arg0
) == SAVE_EXPR
2360 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2363 /* Next handle constant cases, those for which we can return 1 even
2364 if ONLY_CONST is set. */
2365 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2366 switch (TREE_CODE (arg0
))
2369 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2370 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2371 && tree_int_cst_equal (arg0
, arg1
));
2374 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2375 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2376 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2377 TREE_REAL_CST (arg1
)));
2383 if (TREE_CONSTANT_OVERFLOW (arg0
)
2384 || TREE_CONSTANT_OVERFLOW (arg1
))
2387 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2388 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2391 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2394 v1
= TREE_CHAIN (v1
);
2395 v2
= TREE_CHAIN (v2
);
2402 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2404 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2408 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2409 && ! memcmp (TREE_STRING_POINTER (arg0
),
2410 TREE_STRING_POINTER (arg1
),
2411 TREE_STRING_LENGTH (arg0
)));
2414 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2420 if (flags
& OEP_ONLY_CONST
)
2423 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2426 /* Two conversions are equal only if signedness and modes match. */
2427 switch (TREE_CODE (arg0
))
2432 case FIX_TRUNC_EXPR
:
2433 case FIX_FLOOR_EXPR
:
2434 case FIX_ROUND_EXPR
:
2435 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2436 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2443 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2444 TREE_OPERAND (arg1
, 0), flags
);
2446 case tcc_comparison
:
2448 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
2449 TREE_OPERAND (arg1
, 0), flags
)
2450 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2451 TREE_OPERAND (arg1
, 1), flags
))
2454 /* For commutative ops, allow the other order. */
2455 return (commutative_tree_code (TREE_CODE (arg0
))
2456 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2457 TREE_OPERAND (arg1
, 1), flags
)
2458 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2459 TREE_OPERAND (arg1
, 0), flags
));
2462 /* If either of the pointer (or reference) expressions we are
2463 dereferencing contain a side effect, these cannot be equal. */
2464 if (TREE_SIDE_EFFECTS (arg0
)
2465 || TREE_SIDE_EFFECTS (arg1
))
2468 switch (TREE_CODE (arg0
))
2471 case ALIGN_INDIRECT_REF
:
2472 case MISALIGNED_INDIRECT_REF
:
2475 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2476 TREE_OPERAND (arg1
, 0), flags
);
2479 case ARRAY_RANGE_REF
:
2480 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2481 TREE_OPERAND (arg1
, 0), flags
)
2482 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2483 TREE_OPERAND (arg1
, 1), flags
)
2484 && operand_equal_p (TREE_OPERAND (arg0
, 2),
2485 TREE_OPERAND (arg1
, 2), flags
)
2486 && operand_equal_p (TREE_OPERAND (arg0
, 3),
2487 TREE_OPERAND (arg1
, 3), flags
));
2491 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2492 TREE_OPERAND (arg1
, 0), flags
)
2493 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2494 TREE_OPERAND (arg1
, 1), flags
)
2495 && operand_equal_p (TREE_OPERAND (arg0
, 2),
2496 TREE_OPERAND (arg1
, 2), flags
));
2500 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2501 TREE_OPERAND (arg1
, 0), flags
)
2502 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2503 TREE_OPERAND (arg1
, 1), flags
)
2504 && operand_equal_p (TREE_OPERAND (arg0
, 2),
2505 TREE_OPERAND (arg1
, 2), flags
));
2510 case tcc_expression
:
2511 switch (TREE_CODE (arg0
))
2514 case TRUTH_NOT_EXPR
:
2515 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2516 TREE_OPERAND (arg1
, 0), flags
);
2518 case TRUTH_ANDIF_EXPR
:
2519 case TRUTH_ORIF_EXPR
:
2520 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2521 TREE_OPERAND (arg1
, 0), flags
)
2522 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2523 TREE_OPERAND (arg1
, 1), flags
);
2525 case TRUTH_AND_EXPR
:
2527 case TRUTH_XOR_EXPR
:
2528 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2529 TREE_OPERAND (arg1
, 0), flags
)
2530 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2531 TREE_OPERAND (arg1
, 1), flags
))
2532 || (operand_equal_p (TREE_OPERAND (arg0
, 0),
2533 TREE_OPERAND (arg1
, 1), flags
)
2534 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2535 TREE_OPERAND (arg1
, 0), flags
));
2538 /* If the CALL_EXPRs call different functions, then they
2539 clearly can not be equal. */
2540 if (! operand_equal_p (TREE_OPERAND (arg0
, 0),
2541 TREE_OPERAND (arg1
, 0), flags
))
2545 unsigned int cef
= call_expr_flags (arg0
);
2546 if (flags
& OEP_PURE_SAME
)
2547 cef
&= ECF_CONST
| ECF_PURE
;
2554 /* Now see if all the arguments are the same. operand_equal_p
2555 does not handle TREE_LIST, so we walk the operands here
2556 feeding them to operand_equal_p. */
2557 arg0
= TREE_OPERAND (arg0
, 1);
2558 arg1
= TREE_OPERAND (arg1
, 1);
2559 while (arg0
&& arg1
)
2561 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2565 arg0
= TREE_CHAIN (arg0
);
2566 arg1
= TREE_CHAIN (arg1
);
2569 /* If we get here and both argument lists are exhausted
2570 then the CALL_EXPRs are equal. */
2571 return ! (arg0
|| arg1
);
2577 case tcc_declaration
:
2578 /* Consider __builtin_sqrt equal to sqrt. */
2579 return (TREE_CODE (arg0
) == FUNCTION_DECL
2580 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2581 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2582 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
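
/* For illustration (an added sketch, not from the original sources):
   given side-effect-free trees for "a + b" and "b + a",
   operand_equal_p (arg0, arg1, 0) returns 1 through the commutative
   fallback in the tcc_binary case above, while
   operand_equal_p (arg0, arg1, OEP_ONLY_CONST) returns 0 because
   neither tree is a constant.  */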
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
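
/* For illustration (an added sketch of a hypothetical front-end
   scenario, not from the original sources): if "sc == 12" with
   "signed char sc" was shortened by the front end, ARG0 may be the
   narrowed "(signed char) sc" while ARG1 is the promoted "(int) sc"
   and OTHER the constant 12.  get_narrower strips the widening back to
   the 8-bit operand, whose precision is below CORRECT_WIDTH, so the
   final operand_equal_p call succeeds.  */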
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
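
/* For illustration (an added sketch, not from the original sources):
   on "a < b && b != a" the tcc_binary recursion visits both
   comparisons; the first stores *CVAL1 = a and *CVAL2 = b, and the
   second matches the saved values in either operand order, so the
   result is 1.  On "a < b && b != c" the third distinct operand c
   fails every branch of the tcc_comparison case, so the result is 0.  */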
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case tcc_binary:
      return fold (build2 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1),
                           eval_subst (TREE_OPERAND (arg, 1),
                                       old0, new0, old1, new1)));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build3 (code, type,
                               eval_subst (TREE_OPERAND (arg, 0),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 1),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 2),
                                           old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build2 (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
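
/* For illustration (an added sketch, not from the original sources):
   with ARG the tree for "x < y ? x == y : x > y", OLD0/NEW0 = x/a and
   OLD1/NEW1 = y/b, eval_subst rebuilds the COND_EXPR as
   "a < b ? a == b : a > b", folding each rebuilt comparison as it
   goes.  */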
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
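
/* For illustration (an added sketch, not from the original sources):
   if "f () * 0 * g ()" folds to zero, omit_two_operands (type,
   integer_zero_node, f_call, g_call) yields "(f (), (g (), 0))", so
   both calls are still evaluated in order; with side-effect-free
   operands it simply returns the converted result as a non-lvalue.  */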
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type,
                           build_int_cst (NULL_TREE, integer_zerop (arg)));

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
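
/* For illustration (an added sketch, not from the original sources):
   invert_truthvalue turns "a && b" into "!a || !b" via the
   TRUTH_AND_EXPR case, and an integer "x < y" into "x >= y" via
   invert_tree_comparison.  For "u < v" on doubles with
   -ftrapping-math, the comparison is instead wrapped in a
   TRUTH_NOT_EXPR, because "u >= v" would raise different exceptions
   when a NaN is involved.  */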
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build2 (TREE_CODE (arg0), type, common,
                       fold (build2 (code, type, left, right))));
}
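
/* For illustration (an added sketch, not from the original sources):
   the source-level effect of the distribution above, using the
   identity (A | B) & (A | C) == A | (B & C).  */
#if 0
static unsigned example_before (unsigned x) { return (x | 4) & (x | 1); }
static unsigned example_after (unsigned x) { return x | (4 & 1); } /* == x */
#endif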
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result = build3 (BIT_FIELD_REF, type, inner,
                        size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
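
/* For illustration (an added sketch under layout assumptions, not from
   the original sources): given

        struct s { unsigned a : 3; unsigned b : 5; } x;

   on a little-endian target whose best mode for the field is QImode,
   "x.b == 7" is rewritten as roughly

        (w & 0xf8) == (7 << 3)

   where w is an 8-bit BIT_FIELD_REF of x, avoiding the shift a plain
   bitfield extraction would need.  */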
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
                         fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
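
/* For illustration (an added sketch, not from the original sources):
   for a 3-bit field the code above picks an 8-bit unsigned type, so
   PRECISION is 8; the all-ones constant shifted left and then right by
   8 - 3 = 5 leaves the mask 0x07.  If the field was referenced under
   "& 5", *PMASK becomes 0x07 & 5 == 5 and *PAND_MASK records the
   original 5.  */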
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
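
/* For illustration (an added sketch, not from the original sources):
   for a 16-bit signed short, WIDTH is 16, so LO becomes
   1 << 15 == 0x8000 and MASK_LO covers the low 16 bits; VAL matches
   exactly when its masked low part equals 0x8000, i.e. when it is the
   most negative short.  */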
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (CONSTANT_CLASS_P (exp)
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                          arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
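
/* For illustration (an added sketch, not from the original sources):
   range_binop (LT_EXPR, integer_type_node, high0, 1, low1, 0) with
   high0 == 5 and low1 omitted compares SGN values 0 and -1, so
   "upper bound 5 < lowest possible value" folds to false; likewise two
   omitted upper bounds compare equal, which is what lets "+ [-, -]"
   act as always-true without a special case.  */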
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (first_rtl_op (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
                                              fold_convert (arg0_type,
                                                            high_positive),
                                              fold_convert (arg0_type,
                                                            integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
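
/* For illustration (an added sketch, not from the original sources):
   for EXP = "x < 10" the comparison case records "- [10, -]", i.e. x
   lies outside the range from 10 upward, which is exactly x < 10.  If
   x is unsigned, the merge with the implicit "x >= 0" then turns this
   into "+ [0, 9]".  */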
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build2 (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build2 (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build2 (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build2 (GT_EXPR, type, exp,
                               fold_convert (etype, integer_zero_node)));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build2 (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
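
/* For illustration (an added sketch, not from the original sources):
   the classic unsigned range trick this function emits for
   in_p == 1, low == 2, high == 5, written as plain C.  */
#if 0
static int example_before (int x) { return x >= 2 && x <= 5; }
static int example_after (int x) { return (unsigned) x - 2u <= 3u; }
#endif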
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
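
/* For illustration (an added sketch, not from the original sources):
   merging + [2, 9] with + [4, 5] keeps the subset and yields + [4, 5];
   merging + [2, 5] with + [4, 9] overlaps without subsumption and
   yields the intersection + [4, 5]; and merging the adjacent excluded
   ranges - [0, 3] and - [4, 9] collapses to the single range
   - [0, 9].  */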
4122 /* Subroutine of fold, looking inside expressions of the form
4123 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4124 of the COND_EXPR. This function is being used also to optimize
4125 A op B ? C : A, by reversing the comparison first.
4127 Return a folded expression whose code is not a COND_EXPR
4128 anymore, or NULL_TREE if no folding opportunity is found. */
4131 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4133 enum tree_code comp_code
= TREE_CODE (arg0
);
4134 tree arg00
= TREE_OPERAND (arg0
, 0);
4135 tree arg01
= TREE_OPERAND (arg0
, 1);
4136 tree arg1_type
= TREE_TYPE (arg1
);
4142 /* If we have A op 0 ? A : -A, consider applying the following
4145 A == 0? A : -A same as -A
4146 A != 0? A : -A same as A
4147 A >= 0? A : -A same as abs (A)
4148 A > 0? A : -A same as abs (A)
4149 A <= 0? A : -A same as -abs (A)
4150 A < 0? A : -A same as -abs (A)
4152 None of these transformations work for modes with signed
4153 zeros. If A is +/-0, the first two transformations will
4154 change the sign of the result (from +0 to -0, or vice
4155 versa). The last four will fix the sign of the result,
4156 even though the original expressions could be positive or
4157 negative, depending on the sign of A.
4159 Note that all these transformations are correct if A is
4160 NaN, since the two alternatives (A and -A) are also NaNs. */
4161 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4162 ? real_zerop (arg01
)
4163 : integer_zerop (arg01
))
4164 && TREE_CODE (arg2
) == NEGATE_EXPR
4165 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4170 tem
= fold_convert (arg1_type
, arg1
);
4171 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4174 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4177 if (flag_trapping_math
)
4182 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4183 arg1
= fold_convert (lang_hooks
.types
.signed_type
4184 (TREE_TYPE (arg1
)), arg1
);
4185 tem
= fold (build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
));
4186 return pedantic_non_lvalue (fold_convert (type
, tem
));
4189 if (flag_trapping_math
)
4193 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4194 arg1
= fold_convert (lang_hooks
.types
.signed_type
4195 (TREE_TYPE (arg1
)), arg1
);
4196 tem
= fold (build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
));
4197 return negate_expr (fold_convert (type
, tem
));
4199 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4203 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4204 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4205 both transformations are correct when A is NaN: A != 0
4206 is then true, and A == 0 is false. */
4208 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4210 if (comp_code
== NE_EXPR
)
4211 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4212 else if (comp_code
== EQ_EXPR
)
4213 return fold_convert (type
, integer_zero_node
);
4216 /* Try some transformations of A op B ? A : B.
4218 A == B? A : B same as B
4219 A != B? A : B same as A
4220 A >= B? A : B same as max (A, B)
4221 A > B? A : B same as max (B, A)
4222 A <= B? A : B same as min (A, B)
4223 A < B? A : B same as min (B, A)
4225 As above, these transformations don't work in the presence
4226 of signed zeros. For example, if A and B are zeros of
4227 opposite sign, the first two transformations will change
4228 the sign of the result. In the last four, the original
4229 expressions give different results for (A=+0, B=-0) and
4230 (A=-0, B=+0), but the transformed expressions do not.
4232 The first two transformations are correct if either A or B
4233 is a NaN. In the first transformation, the condition will
4234 be false, and B will indeed be chosen. In the case of the
4235 second transformation, the condition A != B will be true,
4236 and A will be chosen.
4238 The conversions to max() and min() are not correct if B is
4239 a number and A is not. The conditions in the original
4240 expressions will be false, so all four give B. The min()
4241 and max() versions would give a NaN instead. */
      if (operand_equal_for_comparison_p (arg01, arg2, arg00))
	{
	  tree comp_op0 = arg00;
	  tree comp_op1 = arg01;
	  tree comp_type = TREE_TYPE (comp_op0);

	  /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
	  if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	    {
	      comp_type = type;
	      comp_op0 = arg1;
	      comp_op1 = arg2;
	    }

	  switch (comp_code)
	    {
	    case EQ_EXPR:
	      return pedantic_non_lvalue (fold_convert (type, arg2));
	    case NE_EXPR:
	      return pedantic_non_lvalue (fold_convert (type, arg1));
	    case LE_EXPR:
	    case LT_EXPR:
	    case UNLE_EXPR:
	    case UNLT_EXPR:
	      /* In C++ a ?: expression can be an lvalue, so put the
		 operand which will be used if they are equal first
		 so that we can convert this back to the
		 corresponding COND_EXPR.  */
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  comp_op0 = fold_convert (comp_type, comp_op0);
		  comp_op1 = fold_convert (comp_type, comp_op1);
		  tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
			? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
			: fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
		  return pedantic_non_lvalue (fold_convert (type, tem));
		}
	      break;
	    case GE_EXPR:
	    case GT_EXPR:
	    case UNGE_EXPR:
	    case UNGT_EXPR:
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  comp_op0 = fold_convert (comp_type, comp_op0);
		  comp_op1 = fold_convert (comp_type, comp_op1);
		  tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
			? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
			: fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
		  return pedantic_non_lvalue (fold_convert (type, tem));
		}
	      break;
	    case UNEQ_EXPR:
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		return pedantic_non_lvalue (fold_convert (type, arg2));
	      break;
	    case LTGT_EXPR:
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      break;
	    default:
	      gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	      break;
	    }
	}
      /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
	 we might still be able to simplify this.  For example,
	 if C1 is one less or one more than C2, this might have started
	 out as a MIN or MAX and been transformed by this function.
	 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
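
      /* Illustrative sketch (not from the original source): for int A,

	   a < 6 ? a : 5

	 has C1 == C2 + 1, so it started life as min (a, 5) and is
	 rebuilt as MIN_EXPR (a, 5) by the LT_EXPR case below.  */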
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg01) == INTEGER_CST
	  && TREE_CODE (arg2) == INTEGER_CST)
	switch (comp_code)
	  {
	  case EQ_EXPR:
	    /* We can replace A with C1 in this case.  */
	    arg1 = fold_convert (type, arg01);
	    return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));

	  case LT_EXPR:
	    /* If C1 is C2 + 1, this is min(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (PLUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
							type, arg1, arg2)));
	    break;

	  case LE_EXPR:
	    /* If C1 is C2 - 1, this is min(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (MINUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
							type, arg1, arg2)));
	    break;

	  case GT_EXPR:
	    /* If C1 is C2 - 1, this is max(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (MINUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
							type, arg1, arg2)));
	    break;

	  case GE_EXPR:
	    /* If C1 is C2 + 1, this is max(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (PLUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
							type, arg1, arg2)));
	    break;

	  case NE_EXPR:
	    break;

	  default:
	    gcc_unreachable ();
	  }
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
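
/* Illustrative sketch (not from the original source): the classic
   candidate is

     if (ch >= '0' && ch <= '9') ...

   which make_range/merge_ranges turn into a single range check,
   roughly equivalent to

     if ((unsigned char) (ch - '0') <= 9) ...

   so only one branch remains.  */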
static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		       TREE_OPERAND (exp, 1));

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
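
/* Illustrative sketch (not from the original source): with P = 8 and
   a 32-bit C, unextend (0x80, 8, 0, NULL) treats 0x80 as the 8-bit
   value -128, so the result has the upper 24 bits set (0xffffff80);
   unextend (0x7f, 8, 0, NULL) leaves 0x0000007f unchanged, since bit 7
   is clear.  */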
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
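
/* Illustrative sketch (not from the original source): given

     struct s { unsigned a : 4; unsigned b : 4; } *p;
     ... p->a == 2 && p->b == 4 ...

   the two bit-field tests can be merged into a single load of the
   containing byte, masked once and compared against the combined
   constant, roughly ((*(unsigned char *) p) & 0xff) == 0x42 on a
   little-endian target.  The exact layout is target-dependent; this
   only shows the shape of the rewrite.  */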
static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning ("%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build2 (TRUTH_ORIF_EXPR, type,
		      optimize_minmax_comparison
		      (build2 (EQ_EXPR, type, arg0, comp_const)),
		      optimize_minmax_comparison
		      (build2 (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build2 (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build2 (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build2 (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build2 (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build2 (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build2 (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
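
/* Illustrative sketch (not from the original source): for signed X,

     MAX (X, 0) == 0

   is rewritten to X <= 0 by the EQ_EXPR/consts_equal case above: the
   MAX result is 0 exactly when X does not exceed 0.  */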
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */
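
/* Illustrative sketch (not from the original source): folding

     ((X * 8) + (Y * 16)) / 4

   calls extract_muldiv with C = 4 and CODE = TRUNC_DIV_EXPR; each
   MULT_EXPR operand absorbs the division, giving X * 2 + Y * 4, which
   is valid because each term is a multiple of 4 (the multiple_of_p
   check below).  */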
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			       fold_convert (ctype, t2)));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			     fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build2 (tcode, ctype,
			     fold (build2 (code, ctype,
					   fold_convert (ctype, op0),
					   fold_convert (ctype, c))),
			     op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			     fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
			     fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
				 fold_convert (ctype,
					       const_binop (TRUNC_DIV_EXPR,
							    op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build2 (code, ctype, fold_convert (ctype, op0),
				 fold_convert (ctype,
					       const_binop (TRUNC_DIV_EXPR,
							    c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (value ? integer_one_node
					     : integer_zero_node);
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it is pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (lhs == 0)
    lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
			     : build2 (code, type, arg, true_value));
  if (rhs == 0)
    rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
			     : build2 (code, type, arg, false_value));

  test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
  return fold_convert (type, test);
}
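
/* Illustrative sketch (not from the original source): with a constant
   ARG, folding

     10 + (b ? 2 : 3)

   distributes the addition into both arms, giving b ? 12 : 13, so the
   COND_EXPR's operands become constants.  */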
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
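
/* Illustrative sketch (not from the original source): with default
   rounding, (-0.0) + 0.0 evaluates to +0.0, so "x + 0.0 -> x" would
   change the sign of a zero x; "x - 0.0 -> x" is accepted instead,
   because (-0.0) - 0.0 is -0.0 and 0.0 - 0.0 is +0.0, except when
   rounding towards -infinity makes 0.0 - 0.0 equal to -0.0.  */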
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
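
/* Illustrative sketch (not from the original source): with y = 4.0,

     sqrt (x) > 4.0

   becomes x > 16.0 via the GT_EXPR branch below, since squaring both
   sides is monotonic for nonnegative values; a negative y instead
   collapses the comparison to a constant or to x >= 0.  */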
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build2 (GE_EXPR, type, arg,
			       build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build2 (EQ_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build2 (code, type, arg,
			       build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build2 (NE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build2 (GE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (NE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build2 (code, type, arg,
				 build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (code, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
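
/* Illustrative sketch (not from the original source):

     x < HUGE_VAL   (i.e. x < +Inf)

   folds to x <= DBL_MAX via the LT_EXPR case below: every value below
   +Inf is at most the largest representable finite value, and a NaN
   makes both forms false.  */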
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build2 (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			     arg0, build_real (TREE_TYPE (arg0), max)));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
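
/* Illustrative sketch (not from the original source): for signed int X,

     X / 4 == 2

   holds exactly for X in [8, 11] under truncating division, so the
   comparison folds to the range check 8 <= X && X <= 11 built below.  */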
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */
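
/* Illustrative sketch (not from the original source): for unsigned x,

     (x & 8) != 0

   tests the single bit 3, so it can be rewritten as (x >> 3) & 1;
   the EQ form XORs in a 1 first, as done below.  */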
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			       result_type, fold_convert (stype, arg00),
			       fold_convert (stype, integer_zero_node)));
	}

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      if (code == EQ_EXPR)
	inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
			      inner, integer_one_node));

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */
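
/* Illustrative sketch (not from the original source): fold applied to
   the tree for "x * 1 + 0" returns the tree for plain "x", while fold
   applied to "1 + 2" yields the INTEGER_CST 3; in both cases the
   result has the same type as the original expression.  */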
5975 #ifdef ENABLE_FOLD_CHECKING
5976 # define fold(x) fold_1 (x)
5977 static tree
fold_1 (tree
);
5983 const tree t
= expr
;
5984 const tree type
= TREE_TYPE (expr
);
5985 tree t1
= NULL_TREE
;
5987 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
5988 enum tree_code code
= TREE_CODE (t
);
5989 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
5991 /* WINS will be nonzero when the switch is done
5992 if all operands are constant. */
5995 /* Return right away if a constant. */
5996 if (kind
== tcc_constant
)
5999 if (code
== NOP_EXPR
|| code
== FLOAT_EXPR
|| code
== CONVERT_EXPR
)
6003 /* Special case for conversion ops that can have fixed point args. */
6004 arg0
= TREE_OPERAND (t
, 0);
6006 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6008 STRIP_SIGN_NOPS (arg0
);
6010 if (arg0
!= 0 && TREE_CODE (arg0
) == COMPLEX_CST
)
6011 subop
= TREE_REALPART (arg0
);
6015 if (subop
!= 0 && TREE_CODE (subop
) != INTEGER_CST
6016 && TREE_CODE (subop
) != REAL_CST
)
6017 /* Note that TREE_CONSTANT isn't enough:
6018 static var addresses are constant but we can't
6019 do arithmetic on them. */
6022 else if (IS_EXPR_CODE_CLASS (kind
))
6024 int len
= first_rtl_op (code
);
6026 for (i
= 0; i
< len
; i
++)
6028 tree op
= TREE_OPERAND (t
, i
);
6032 continue; /* Valid for CALL_EXPR, at least. */
6034 /* Strip any conversions that don't change the mode. This is
6035 safe for every expression, except for a comparison expression
6036 because its signedness is derived from its operands. So, in
6037 the latter case, only strip conversions that don't change the
6040 Note that this is done as an internal manipulation within the
6041 constant folder, in order to find the simplest representation
6042 of the arguments so that their form can be studied. In any
6043 cases, the appropriate type conversions should be put back in
6044 the tree that will get out of the constant folder. */
6045 if (kind
== tcc_comparison
)
6046 STRIP_SIGN_NOPS (op
);
6050 if (TREE_CODE (op
) == COMPLEX_CST
)
6051 subop
= TREE_REALPART (op
);
6055 if (TREE_CODE (subop
) != INTEGER_CST
6056 && TREE_CODE (subop
) != REAL_CST
)
6057 /* Note that TREE_CONSTANT isn't enough:
6058 static var addresses are constant but we can't
6059 do arithmetic on them. */
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build2 (code, type, TREE_OPERAND (t, 1),
                         TREE_OPERAND (t, 0)));

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expr.c.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                          : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                          : TRUTH_XOR_EXPR,
                          type, fold_convert (boolean_type_node, arg0),
                          fold_convert (boolean_type_node, arg1)));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return tem;
    }
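
  /* Editorial illustration (comment added for exposition; not part of
     the original source): given two comparison operands,
     (a < b) & (c == d) becomes TRUTH_AND_EXPR <a < b, c == d>, and
     (a < b) == (c == d) becomes the inversion of
     TRUTH_XOR_EXPR <a < b, c == d>.  */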
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold (build1 (code, type, arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold (build1 (code, type, arg02));
          tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                              arg01, arg02));

          /* If this was a conversion, and all we did was to move it
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && (INTEGRAL_TYPE_P
                        (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                    && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold (build3 (COND_EXPR, type, arg0,
                                 fold (build1 (code, type,
                                               integer_one_node)),
                                 fold (build1 (code, type,
                                               integer_zero_node))));
        }
    }
  else if (TREE_CODE_CLASS (code) == tcc_comparison
           && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                   fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == tcc_comparison
           && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                   fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == tcc_binary
           || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold (build2 (code, type, TREE_OPERAND (arg0, 1),
                                     arg1)));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold (build2 (code, type,
                                     arg0, TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
        return TREE_OPERAND (t, 0);

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float))
              && inter_prec >= inside_prec
              && (inter_float || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
        }
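
      /* Editorial illustration (comment added for exposition; not part
         of the original source): with 8-bit chars, 16-bit shorts and
         32-bit ints, the sign-extension rule above rewrites
         (int)(short)(unsigned char)c, a zero extension followed by a
         sign extension, as the single zero extension
         (int)(unsigned char)c.  */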
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tree prev = TREE_OPERAND (t, 0);
          tem = copy_node (t);
          TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
        {
          tree and = TREE_OPERAND (t, 0);
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            return fold (build2 (BIT_AND_EXPR, type,
                                 fold_convert (type, and0),
                                 fold_convert (type, and1)));
        }
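
      /* Editorial illustration (comment added for exposition; not part
         of the original source): for unsigned char c, (int)(c & 0x3f)
         becomes (int)c & 0x3f, folding the widening conversion into the
         BIT_AND_EXPR; the sign-bit check above keeps the transformation
         safe for signed operands.  */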
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = TREE_TYPE (t);
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : t;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
        return build1 (VIEW_CONVERT_EXPR, type,
                       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
          if (m)
            return TREE_VALUE (m);
        }
      return t;

    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
        {
          tem = copy_node (t);
          TREE_CONSTANT (tem) = wins;
          TREE_INVARIANT (tem) = wins;
          return tem;
        }
      return t;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
        return fold_convert (type, negate_expr (arg0));
      return t;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold (build1 (ABS_EXPR,
                                                     TREE_TYPE (targ0),
                                                     targ0)));
        }
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;
      return t;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return build2 (COMPLEX_EXPR, type,
                       TREE_OPERAND (arg0, 0),
                       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return build_complex (type, TREE_REALPART (arg0),
                              negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (TREE_CODE (arg0), type,
                             fold (build1 (CONJ_EXPR, type,
                                           TREE_OPERAND (arg0, 0))),
                             fold (build1 (CONJ_EXPR, type,
                                           TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
        return TREE_OPERAND (arg0, 0);
      return t;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      return t;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
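
          /* Editorial illustration (comment added for exposition; not
             part of the original source): (x & 0xF0) + (y & 0x0F) masks
             disjoint bits, so the addition is retried above as
             (x & 0xF0) | (y & 0x0F).  */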
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if ((TREE_CODE (arg0) == PLUS_EXPR
               && TREE_CODE (arg1) == MULT_EXPR)
              || (TREE_CODE (arg1) == PLUS_EXPR
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;

              if (TREE_CODE (arg0) == PLUS_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold (build2 (PLUS_EXPR, type,
                                     fold (build2 (PLUS_EXPR, type,
                                                   fold_convert (type, parg0),
                                                   fold_convert (type, marg))),
                                     fold_convert (type, parg1)));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold (build2 (PLUS_EXPR, type,
                                     fold (build2 (PLUS_EXPR, type,
                                                   fold_convert (type, parg1),
                                                   fold_convert (type, marg))),
                                     fold_convert (type, parg0)));
            }
          if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
            {
              tree arg00, arg01, arg10, arg11;
              tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

              /* (A * C) + (B * C) -> (A+B) * C.
                 We are most concerned about the case where C is a constant,
                 but other combinations show up during loop reduction.  Since
                 it is not difficult, try all four possibilities.  */

              arg00 = TREE_OPERAND (arg0, 0);
              arg01 = TREE_OPERAND (arg0, 1);
              arg10 = TREE_OPERAND (arg1, 0);
              arg11 = TREE_OPERAND (arg1, 1);
              same = NULL_TREE;

              if (operand_equal_p (arg01, arg11, 0))
                same = arg01, alt0 = arg00, alt1 = arg10;
              else if (operand_equal_p (arg00, arg10, 0))
                same = arg00, alt0 = arg01, alt1 = arg11;
              else if (operand_equal_p (arg00, arg11, 0))
                same = arg00, alt0 = arg01, alt1 = arg10;
              else if (operand_equal_p (arg01, arg10, 0))
                same = arg01, alt0 = arg00, alt1 = arg11;

              /* No identical multiplicands; see if we can find a common
                 power-of-two factor in non-power-of-two multiplies.  This
                 can help in multi-dimensional array access.  */
              else if (TREE_CODE (arg01) == INTEGER_CST
                       && TREE_CODE (arg11) == INTEGER_CST
                       && TREE_INT_CST_HIGH (arg01) == 0
                       && TREE_INT_CST_HIGH (arg11) == 0)
                {
                  HOST_WIDE_INT int01, int11, tmp;
                  int01 = TREE_INT_CST_LOW (arg01);
                  int11 = TREE_INT_CST_LOW (arg11);

                  /* Move min of absolute values to int11.  */
                  if ((int01 >= 0 ? int01 : -int01)
                      < (int11 >= 0 ? int11 : -int11))
                    {
                      tmp = int01, int01 = int11, int11 = tmp;
                      alt0 = arg00, arg00 = arg10, arg10 = alt0;
                      alt0 = arg01, arg01 = arg11, arg11 = alt0;
                    }

                  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
                    {
                      alt0 = fold (build2 (MULT_EXPR, type, arg00,
                                           build_int_cst (NULL_TREE,
                                                          int01 / int11)));
                      alt1 = arg10;
                      same = arg11;
                    }
                }

              if (same)
                return fold (build2 (MULT_EXPR, type,
                                     fold (build2 (PLUS_EXPR, type,
                                                   fold_convert (type, alt0),
                                                   fold_convert (type, alt1))),
                                     fold_convert (type, same)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold (build2 (MINUS_EXPR, type,
                                     fold_convert (type, arg0),
                                     fold_convert (type, tem)));
            }

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold (build2 (MULT_EXPR, type, arg0,
                                 build_real (type, dconst2)));
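
          /* Editorial illustration (comment added for exposition; not
             part of the original source): for double d, d + d becomes
             d * 2.0; scaling by exactly 2.0 is safe even without
             -funsafe-math-optimizations.  */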
          /* Convert x*c+x into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold (build2 (MULT_EXPR, type, arg1,
                                   build_real (type, c)));
            }

          /* Convert x+x*c into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold (build2 (MULT_EXPR, type, arg0,
                                   build_real (type, c)));
            }

          /* Convert x*c1+x*c2 into x*(c1+c2).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              REAL_VALUE_TYPE c1, c2;

              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
              return fold (build2 (MULT_EXPR, type,
                                   TREE_OPERAND (arg0, 0),
                                   build_real (type, c1)));
            }
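
          /* Editorial illustration (comment added for exposition; not
             part of the original source): under
             -funsafe-math-optimizations, 2.0*x + 3.0*x becomes 5.0*x by
             the rule above; the two constants are added at compile time
             via real_arithmetic.  */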
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
                  return fold (build2 (PLUS_EXPR, type, tree0, tree11));
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
                  return fold (build2 (PLUS_EXPR, type, tree00, tree0));
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
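
      /* Editorial illustration (comment added for exposition; not part
         of the original source): for 32-bit unsigned x, both
         (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) match the
         patterns above and collapse to a single rotate of x.  */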
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! wins
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

    binary:
      if (wins)
        t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != type)
            t1 = fold_convert (type, t1);

          return t1;
        }
      return t;
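
      /* Editorial illustration (comment added for exposition; not part
         of the original source): (x + 1) + (y + 2) splits into the
         variables x and y and the literals 1 and 2, which reassociate
         above into (x + y) + 3.  */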
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
                             TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold (build2 (BIT_AND_EXPR, type,
                                     fold (build1 (BIT_NOT_EXPR, type,
                                                   TREE_OPERAND (arg1, 0))),
                                     arg0));
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold (build2 (BIT_AND_EXPR, type,
                                     fold (build1 (BIT_NOT_EXPR, type,
                                                   TREE_OPERAND (arg1, 1))),
                                     arg0));
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold (build2 (BIT_XOR_EXPR, type,
                                      TREE_OPERAND (arg0, 0), mask1));
                  return fold (build2 (MINUS_EXPR, type, tem, mask1));
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
        return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if (TREE_CODE (arg0) == ADDR_EXPR
            && TREE_CODE (arg1) == ADDR_EXPR
            && ptr_difference_const (TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0),
                                     &diff))
          return build_int_cst_type (type, diff);
      }

      if (TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (arg1) == MULT_EXPR
          && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          /* (A * C) - (B * C) -> (A-B) * C.  */
          if (operand_equal_p (TREE_OPERAND (arg0, 1),
                               TREE_OPERAND (arg1, 1), 0))
            return fold (build2 (MULT_EXPR, type,
                                 fold (build2 (MINUS_EXPR, type,
                                               TREE_OPERAND (arg0, 0),
                                               TREE_OPERAND (arg1, 0))),
                                 TREE_OPERAND (arg0, 1)));
          /* (A * C1) - (A * C2) -> A * (C1-C2).  */
          if (operand_equal_p (TREE_OPERAND (arg0, 0),
                               TREE_OPERAND (arg1, 0), 0))
            return fold (build2 (MULT_EXPR, type,
                                 TREE_OPERAND (arg0, 0),
                                 fold (build2 (MINUS_EXPR, type,
                                               TREE_OPERAND (arg0, 1),
                                               TREE_OPERAND (arg1, 1)))));
        }

      goto associate;
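
      /* Editorial illustration (comment added for exposition; not part
         of the original source): a*c - b*c factors above to (a - b)*c,
         and x*7 - x*3 factors to x*(7 - 3), whose constant part the
         recursive fold then reduces to x*4.  */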
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold (build2 (MULT_EXPR, type,
                             TREE_OPERAND (arg0, 0),
                             negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold (build2 (MULT_EXPR, type,
                             negate_expr (arg0),
                             TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold (build2 (LSHIFT_EXPR, type, arg0,
                                 TREE_OPERAND (arg1, 1)));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold (build2 (LSHIFT_EXPR, type, arg1,
                                 TREE_OPERAND (arg0, 1)));

          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
                                             fold_convert (type, arg1),
                                             code, NULL_TREE)))
            return fold_convert (type, tem);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold (build2 (RDIV_EXPR, type, tem,
                                     TREE_OPERAND (arg0, 1)));
            }
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg, arglist;
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
                  arglist = build_tree_list (NULL_TREE, arg);
                  return build_function_call_expr (rootfn, arglist);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  tree arg = build2 (PLUS_EXPR, type,
                                     TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                     TREE_VALUE (TREE_OPERAND (arg1, 1)));
                  tree arglist = build_tree_list (NULL_TREE, fold (arg));
                  return build_function_call_expr (expfn, arglist);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = build2 (MULT_EXPR, type, arg00, arg10);
                      tree arglist = tree_cons (NULL_TREE, fold (arg),
                                                build_tree_list (NULL_TREE,
                                                                 arg01));
                      return build_function_call_expr (powfn, arglist);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
                      tree arglist = tree_cons (NULL_TREE, arg00,
                                                build_tree_list (NULL_TREE,
                                                                 arg));
                      return build_function_call_expr (powfn, arglist);
                    }
                }
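
              /* Editorial illustration (comment added for exposition;
                 not part of the original source): under
                 -funsafe-math-optimizations, pow (x, y) * pow (z, y)
                 becomes pow (x*z, y), and pow (x, 2.5) * pow (x, 0.5)
                 becomes pow (x, 3.0).  */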
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_function_call_expr (sinfn,
                                                     TREE_OPERAND (arg0, 1));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));
                  if (TREE_CODE (arg11) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  if (TREE_CODE (arg01) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg1, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (! optimize_size
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      tree arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg0);
        }

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build2 (BIT_AND_EXPR, type,
                                       TREE_OPERAND (arg0, 0),
                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold (build1 (BIT_NOT_EXPR, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build2 (BIT_IOR_EXPR, type,
                                       TREE_OPERAND (arg0, 0),
                                       TREE_OPERAND (arg1, 0))));
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return t;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold (build2 (RDIV_EXPR, type,
                             TREE_OPERAND (arg0, 0),
                             negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold (build2 (RDIV_EXPR, type,
                             negate_expr (arg0),
                             TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold (build2 (MULT_EXPR, type, arg0, tem));
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold (build2 (MULT_EXPR, type, arg0, tem));
                }
            }
        }
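
      /* Editorial illustration (comment added for exposition; not part
         of the original source): x / 4.0 becomes x * 0.25 whenever
         optimizing, because 0.25 is an exact reciprocal; x / 3.0
         becomes a multiply only under -funsafe-math-optimizations.  */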
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                             fold (build2 (MULT_EXPR, type,
                                           TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold (build2 (MULT_EXPR, type,
                             fold (build2 (RDIV_EXPR, type, arg0,
                                           TREE_OPERAND (arg1, 0))),
                             TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold (build2 (RDIV_EXPR, type, tem,
                                 TREE_OPERAND (arg1, 0)));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg1);
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode))
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
              tree arglist = build_tree_list (NULL_TREE,
                                              fold_convert (type, arg));
              arg1 = build_function_call_expr (expfn, arglist);
              return fold (build2 (MULT_EXPR, type, arg0, arg1));
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode == BUILT_IN_POW
              || fcode == BUILT_IN_POWF
              || fcode == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
              tree neg11 = fold_convert (type, negate_expr (arg11));
              tree arglist = tree_cons (NULL_TREE, arg10,
                                        build_tree_list (NULL_TREE, neg11));
              arg1 = build_function_call_expr (powfn, arglist);
              return fold (build2 (MULT_EXPR, type, arg0, arg1));
            }
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_function_call_expr (tanfn,
                                                 TREE_OPERAND (arg0, 1));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = TREE_OPERAND (arg0, 1);
                  tmp = build_function_call_expr (tanfn, tmp);
                  return fold (build2 (RDIV_EXPR, type,
                                       build_real (type, dconst1), tmp));
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
              if (TREE_CODE (arg01) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }
        }
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return t;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return fold_convert (type, tem);

      goto binary;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
        return t;

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
         BIT_AND_EXPR, i.e. "X % C" into "X & C2".  */
      if (code == TRUNC_MOD_EXPR
          && TYPE_UNSIGNED (type)
          && integer_pow2p (arg1))
        {
          unsigned HOST_WIDE_INT high, low;
          tree mask;
          int l;

          l = tree_log2 (arg1);
          if (l >= HOST_BITS_PER_WIDE_INT)
            {
              high = ((unsigned HOST_WIDE_INT) 1
                      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
              low = -1;
            }
          else
            {
              high = 0;
              low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
            }

          mask = build_int_cst_wide (type, low, high);
          return fold (build2 (BIT_AND_EXPR, type,
                               fold_convert (type, arg0), mask));
        }
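
      /* Editorial illustration (comment added for exposition; not part
         of the original source): for unsigned x, x % 8 matches the rule
         above and becomes x & 7; the mask is 2^l - 1 with
         l = tree_log2 (arg1) = 3.  */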
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) < 0
          && !flag_trapv
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold (build2 (code, type, fold_convert (type, arg0),
                             fold_convert (type, negate_expr (arg1))));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !flag_trapv)
        return fold (build2 (code, type, fold_convert (type, arg0),
                             fold_convert (type, TREE_OPERAND (arg1, 0))));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return fold_convert (type, tem);

      goto binary;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (NULL_TREE,
                                    GET_MODE_BITSIZE (TYPE_MODE (type)));
          tem = fold_convert (TREE_TYPE (arg1), tem);
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold (build2 (RROTATE_EXPR, type, arg0, tem));
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold (build2 (TREE_CODE (arg0), type,
                             fold (build2 (code, type,
                                           TREE_OPERAND (arg0, 0), arg1)),
                             fold (build2 (code, type,
                                           TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      goto binary;
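
      /* Editorial illustration (comment added for exposition; not part
         of the original source): on a 32-bit type, a left rotate by 5
         is rewritten above as a right rotate by 27, and a right rotate
         by 13 of a right rotate by 19 is dropped entirely, since
         13 + 19 == 32.  */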
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        {
          tem = fold_single_bit_test (code, arg0, arg1, type);
          if (!tem)
            return t;
          return tem;
        }
      return fold_convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
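      /* e.g. "f () && 0" becomes "(f (), 0)": the result is known, but
         the call is still evaluated for its side effects.  */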
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build2 (TREE_CODE (arg0), type, a00,
                                 fold (build2 (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build2 (TREE_CODE (arg0), type, a00,
                                 fold (build2 (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build2 (TREE_CODE (arg0), type, a01,
                                 fold (build2 (code, type, a00, a11))));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */
          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build2 (TREE_CODE (arg0), type,
                                 fold (build2 (code, type, a00, a10)),
                                 a01));
        }
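      /* e.g. the last case turns "(b || a) && (c || a)" into
         "(b && c) || a", which moves the evaluation of c (= a10) ahead
         of a; hence a10 must be side-effect free unless the operators
         commute.  */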
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return t;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
        return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));

      /* If this is an equality comparison of the address of a non-weak
         object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && integer_zerop (arg1))
        return constant_boolean_node (code != EQ_EXPR, type);
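      /* e.g. for a non-weak object "x", "&x == 0" folds to 0 and
         "&x != 0" folds to 1, since its address cannot be null.  */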
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
                                      ? code == EQ_EXPR : code != EQ_EXPR,
                                      type);
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree targ0 = strip_float_extensions (arg0);
          tree targ1 = strip_float_extensions (arg1);
          tree newtype = TREE_TYPE (targ0);

          if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
            newtype = TREE_TYPE (targ1);

          /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
          if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
            return fold (build2 (code, type, fold_convert (newtype, targ0),
                                 fold_convert (newtype, targ1)));
          /* (-a) CMP (-b) -> b CMP a  */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
                                 TREE_OPERAND (arg0, 0)));

          if (TREE_CODE (arg1) == REAL_CST)
            {
              REAL_VALUE_TYPE cst;
              cst = TREE_REAL_CST (arg1);

              /* (-a) CMP CST -> a swap(CMP) (-CST)  */
              if (TREE_CODE (arg0) == NEGATE_EXPR)
                return
                  fold (build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 0),
                                build_real (TREE_TYPE (arg1),
                                            REAL_VALUE_NEGATE (cst))));

              /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
              /* a CMP (-0) -> a CMP 0  */
              if (REAL_VALUE_MINUS_ZERO (cst))
                return fold (build2 (code, type, arg0,
                                     build_real (TREE_TYPE (arg1), dconst0)));

              /* x != NaN is always true, other ops are always false.  */
              if (REAL_VALUE_ISNAN (cst)
                  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
                {
                  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
                  return omit_one_operand (type, tem, arg0);
                }
              /* Fold comparisons against infinity.  */
              if (REAL_VALUE_ISINF (cst))
                {
                  tem = fold_inf_compare (code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
          /* If this is a comparison of a real constant with a PLUS_EXPR
             or a MINUS_EXPR of a real constant, we can convert it into a
             comparison with a revised real constant as long as no overflow
             occurs when unsafe_math_optimizations are enabled.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                          ? MINUS_EXPR : PLUS_EXPR,
                                          arg1, TREE_OPERAND (arg0, 1), 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
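          /* e.g. "x + 1.0 < 3.0" becomes "x < 2.0"; rounding makes this
             unsafe in general, hence the flag_unsafe_math_optimizations
             guard.  */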
          /* Likewise, we can simplify a comparison of a real constant with
             a MINUS_EXPR whose first operand is also a real constant, i.e.
             (c1 - x) < c2 becomes x > c1-c2.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (arg0) == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
              && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                          arg1, 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build2 (swap_tree_comparison (code), type,
                                 TREE_OPERAND (arg0, 1), tem));
          /* Fold comparisons against built-in math functions.  */
          if (TREE_CODE (arg1) == REAL_CST
              && flag_unsafe_math_optimizations
              && ! flag_errno_math)
            {
              enum built_in_function fcode = builtin_mathfn_code (arg0);

              if (fcode != END_BUILTINS)
                {
                  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
        }
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
          && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
              || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
          /* This optimization is invalid for ordered comparisons
             if CONST+INCR overflows or if foo+incr might overflow.
             This optimization is invalid for floating point due to rounding.
             For pointer types we assume overflow doesn't happen.  */
          && (POINTER_TYPE_P (TREE_TYPE (arg0))
              || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && (code == EQ_EXPR || code == NE_EXPR))))
        {
          tree varop, newconst;

          if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
            {
              newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
                                       arg1, TREE_OPERAND (arg0, 1)));
              varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
                              TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
            }
          else
            {
              newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
                                       arg1, TREE_OPERAND (arg0, 1)));
              varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
                              TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
            }

          /* If VAROP is a reference to a bitfield, we must mask
             the constant by the width of the field.  */
          if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
              && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
              && host_integerp (DECL_SIZE (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)), 1))
            {
              tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
              HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
              tree folded_compare, shift;

              /* First check whether the comparison would come out
                 always the same.  If we don't do that we would
                 change the meaning with the masking.  */
              folded_compare = fold (build2 (code, type,
                                             TREE_OPERAND (varop, 0), arg1));
              if (integer_zerop (folded_compare)
                  || integer_onep (folded_compare))
                return omit_one_operand (type, folded_compare, varop);

              shift = build_int_cst (NULL_TREE,
                                     TYPE_PRECISION (TREE_TYPE (varop)) - size);
              shift = fold_convert (TREE_TYPE (varop), shift);
              newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
                                       newconst, shift));
              newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
                                       newconst, shift));
            }

          return fold (build2 (code, type, varop, newconst));
        }
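      /* e.g. "i++ == 5" becomes "++i == 6": the increment is folded into
         the constant so the pre-increment form can be compared directly.  */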
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build2 (GT_EXPR, type, arg0, arg1));

            case LT_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build2 (LE_EXPR, type, arg0, arg1));

            default:
              break;
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.

         This is quite similar to fold_relational_hi_lo; however, my
         attempts to share the code have been nothing but trouble.
         I give up for now.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            unsigned HOST_WIDE_INT signed_max;
            unsigned HOST_WIDE_INT max, min;

            signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

            if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
              {
                max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                min = 0;
              }
            else
              {
                max = signed_max;
                min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
              }

            if (TREE_INT_CST_HIGH (arg1) == 0
                && TREE_INT_CST_LOW (arg1) == max)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));

                case LE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold (build2 (NE_EXPR, type, arg0, arg1));

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == max - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (NE_EXPR, type, arg0, arg1));
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));

                case GE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold (build2 (NE_EXPR, type, arg0, arg1));

                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (NE_EXPR, type, arg0, arg1));
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build2 (EQ_EXPR, type, arg0, arg1));
                default:
                  break;
                }
            else if (!in_gimple_form
                     && TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == signed_max
                     && TYPE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because previous transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
                    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
                    return fold
                      (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                               type, fold_convert (st0, arg0),
                               fold_convert (st1, integer_zero_node)));
                  }
              }
          }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
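      /* e.g. "x + 3 == 7" becomes "x == 4", valid for EQ and NE as long
         as computing the new constant does not overflow.  */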
      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (code, type,
                             TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && TREE_CODE (arg0) == NOP_EXPR
               && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
               && (code == EQ_EXPR || code == NE_EXPR
                   || TYPE_UNSIGNED (TREE_TYPE (arg0))
                      == TYPE_UNSIGNED (TREE_TYPE (tem)))
               && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
               && (TREE_TYPE (t1) == TREE_TYPE (tem)
                   || (TREE_CODE (t1) == INTEGER_CST
                       && TREE_CODE (TREE_TYPE (tem)) == INTEGER_TYPE
                       && int_fits_type_p (t1, TREE_TYPE (tem)))))
        return fold (build2 (code, type, tem,
                             fold_convert (TREE_TYPE (tem), t1)));
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return optimize_minmax_comparison (t);
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build2 (TRUTH_ANDIF_EXPR, type,
                             build2 (GE_EXPR, type,
                                     TREE_OPERAND (arg0, 0), tem),
                             build2 (LE_EXPR, type,
                                     TREE_OPERAND (arg0, 0), arg1)));
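      /* e.g. "abs (x) <= 4" becomes "x >= -4 && x <= 4", a range check
         that avoids computing the absolute value.  */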
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            return
              fold (build2 (code, type,
                            build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                    build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
                                            arg01, TREE_OPERAND (arg00, 1)),
                                    fold_convert (TREE_TYPE (arg0),
                                                  integer_one_node)),
                            arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build2 (code, type,
                            build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                    build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
                                            arg00, TREE_OPERAND (arg01, 1)),
                                    fold_convert (TREE_TYPE (arg0),
                                                  integer_one_node)),
                            arg1));
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
          tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
                                      fold_convert (newtype,
                                                    TREE_OPERAND (arg0, 0)),
                                      fold_convert (newtype,
                                                    TREE_OPERAND (arg0, 1))));

          return fold (build2 (code, type, newmod,
                               fold_convert (newtype, arg1)));
        }
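      /* e.g. for signed x, "x % 4 == 0" is tested as an unsigned MOD;
         the two forms agree whenever the result is compared against
         zero.  */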
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                             arg0, fold_convert (TREE_TYPE (arg0),
                                                 integer_zero_node)));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
         2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold (build1 (BIT_NOT_EXPR,
                                    TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                    TREE_OPERAND (arg0, 1)));
          tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                        arg1, notc));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }
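      /* e.g. "(x & 3) == 4" folds to 0: the constant 4 has a bit set
         outside the mask 3, so the AND can never produce it.  */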
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
          tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                        TREE_OPERAND (arg0, 1), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                               TREE_OPERAND (arg1, 1)),
                       fold_convert (TREE_TYPE (arg0), integer_zero_node));
      else if ((code == LT_EXPR || code == GE_EXPR)
               && TYPE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                  fold_convert (TREE_TYPE (arg0),
                                build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                        TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                      1))),
                  fold_convert (TREE_TYPE (arg0), integer_zero_node));
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              break;

            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              return fold (build2 (EQ_EXPR, type, arg0, arg1));

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer
                 or we don't honor IEEE floating point NaNs.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              break;
            }
        }
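      /* e.g. with IEEE NaNs honored, "x == x" cannot fold to 1, since
         it is false when x is a NaN; "x <= x" degrades to "x == x"
         instead.  */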
      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build2 (code, type,
                                eval_subst (arg0, cval1, maxval,
                                            cval2, minval),
                                arg1));
              tree equal_result
                = fold (build2 (code, type,
                                eval_subst (arg0, cval1, maxval,
                                            cval2, maxval),
                                arg1));
              tree low_result
                = fold (build2 (code, type,
                                eval_subst (arg0, cval1, minval,
                                            cval2, maxval),
                                arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  tem = build2 (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (tem);
                  else
                    return fold (tem);
                }
            }
        }
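      /* e.g. for "(a > b) == 0" the three trial foldings (first value
         greater, equal, second greater) give 0, 1, 1: mask 3 selects
         LE_EXPR, so the whole expression becomes "a <= b".  */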
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && lang_hooks.can_use_bit_fields_p ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                                : TRUTH_ORIF_EXPR),
                               type,
                               fold (build2 (code, type, real0, real1)),
                               fold (build2 (code, type, imag0, imag1))));
        }
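      /* e.g. "(a + b*i) == (c + d*i)" splits into "a == c && b == d",
         and "!=" splits into "a != c || b != d".  */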
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold (build2 (code, type,
                                 build1 (INDIRECT_REF, char_type_node,
                                         TREE_VALUE (arglist)),
                                 fold_convert (char_type_node,
                                               integer_zero_node)));
        }
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
         into a single range test.  */
      if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !integer_zerop (TREE_OPERAND (arg0, 1))
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && !TREE_OVERFLOW (arg1))
        {
          t1 = fold_div_compare (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }
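      /* e.g. for unsigned x, "x / 3 == 2" holds exactly when x is in
         [6, 8], which fold_div_compare expresses as one range test.  */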
      if ((code == EQ_EXPR || code == NE_EXPR)
          && !TREE_SIDE_EFFECTS (arg0)
          && integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        return constant_boolean_node (code == NE_EXPR, type);

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? t : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg0);
        }
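      /* e.g. if one operand is a NaN constant, ORDERED and LTGT fold to
         0 and the UN* predicates fold to 1; the other operand is kept
         only for its side effects.  */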
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold (build2 (code, type, fold_convert (newtype, targ0),
                               fold_convert (newtype, targ1)));
      }

      return t;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.  */
          if (! VOID_TYPE_P (TREE_TYPE (tem))
              || VOID_TYPE_P (type))
            return pedantic_non_lvalue (tem);
          return t;
        }
      if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (type, arg0,
                                                TREE_OPERAND (t, 1),
                                                TREE_OPERAND (t, 2));
          if (tem)
            return tem;
        }
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (t, 2),
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
        {
          tem = invert_truthvalue (arg0);
          if (COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (type, tem,
                                                    TREE_OPERAND (t, 2),
                                                    TREE_OPERAND (t, 1));
              if (tem)
                return tem;
            }
        }
      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
                                TREE_OPERAND (t, 2), false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build3 (code, type, tem,
                                 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
          && integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        return fold_convert (type, fold (build2 (BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem, arg1)));
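      /* e.g. for a 32-bit int, "x < 0 ? 0x80000000 : 0" becomes
         "x & 0x80000000", selecting the sign bit directly.  */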
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold (build2 (BIT_AND_EXPR, type,
                                 TREE_OPERAND (tem, 0), arg1));
        }
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
        }
      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
                                 TREE_OPERAND (t, 2)));
        }
      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
        return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
                             TREE_OPERAND (t, 2)));

      return t;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (TREE_CODE (arg0), type,
                             fold (build1 (REALPART_EXPR, type,
                                           TREE_OPERAND (arg0, 0))),
                             fold (build1 (REALPART_EXPR, type,
                                           TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build2 (TREE_CODE (arg0), type,
                             fold (build1 (IMAGPART_EXPR, type,
                                           TREE_OPERAND (arg0, 0))),
                             fold (build1 (IMAGPART_EXPR, type,
                                           TREE_OPERAND (arg0, 1)))));
      return t;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
        {
          tree tmp = fold_builtin (t, false);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
               <= sizeof (struct tree_decl))
              && sizeof (struct tree_type) <= sizeof (struct tree_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      TYPE_CACHED_VALUES_P (expr) = 0;
      TYPE_CACHED_VALUES (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = first_rtl_op (code);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
, 1)) == INTEGER_CST
)
9252 op1
= TREE_OPERAND (top
, 1);
9253 /* const_binop may not detect overflow correctly,
9254 so check for it explicitly here. */
9255 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
9256 > TREE_INT_CST_LOW (op1
)
9257 && TREE_INT_CST_HIGH (op1
) == 0
9258 && 0 != (t1
= fold_convert (type
,
9259 const_binop (LSHIFT_EXPR
,
9262 && ! TREE_OVERFLOW (t1
))
9263 return multiple_of_p (type
, t1
, bottom
);
    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
)))
9311 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9312 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9314 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9315 both unsigned and at least 2 bits shorter than the result. */
9316 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
9317 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
9318 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
9320 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
9321 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
9322 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
9323 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
9325 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
9326 TYPE_PRECISION (inner2
)) + 1;
9327 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
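      /* e.g. adding two zero-extended 16-bit values in a 32-bit type
         needs at most 17 bits, so the sum is known non-negative.  */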
    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      break;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }

    case CALL_EXPR:
      {
9464 tree fndecl
= get_callee_fndecl (t
);
9465 tree arglist
= TREE_OPERAND (t
, 1);
9467 && DECL_BUILT_IN (fndecl
)
9468 && DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_MD
)
9469 switch (DECL_FUNCTION_CODE (fndecl
))
9471 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9472 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9473 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9474 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9476 CASE_BUILTIN_F (BUILT_IN_ACOS
)
9477 CASE_BUILTIN_F (BUILT_IN_ACOSH
)
9478 CASE_BUILTIN_F (BUILT_IN_CABS
)
9479 CASE_BUILTIN_F (BUILT_IN_COSH
)
9480 CASE_BUILTIN_F (BUILT_IN_ERFC
)
9481 CASE_BUILTIN_F (BUILT_IN_EXP
)
9482 CASE_BUILTIN_F (BUILT_IN_EXP10
)
9483 CASE_BUILTIN_F (BUILT_IN_EXP2
)
9484 CASE_BUILTIN_F (BUILT_IN_FABS
)
9485 CASE_BUILTIN_F (BUILT_IN_FDIM
)
9486 CASE_BUILTIN_F (BUILT_IN_FREXP
)
9487 CASE_BUILTIN_F (BUILT_IN_HYPOT
)
9488 CASE_BUILTIN_F (BUILT_IN_POW10
)
9489 CASE_BUILTIN_I (BUILT_IN_FFS
)
9490 CASE_BUILTIN_I (BUILT_IN_PARITY
)
9491 CASE_BUILTIN_I (BUILT_IN_POPCOUNT
)
9495 CASE_BUILTIN_F (BUILT_IN_SQRT
)
9496 /* sqrt(-0.0) is -0.0. */
9497 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t
))))
9499 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		|| tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		&& tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
	    }
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
      }
      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
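/* An illustrative, standalone sketch -- not part of the original file --
   of the kinds of source-level facts tree_expr_nonnegative_p encodes.
   Kept out of the build with #if 0; the function name is hypothetical.  */
#if 0
static void nonnegative_examples (unsigned char uc, int a, int b)
{
  int widen = uc;            /* NOP_EXPR: widening from a narrower
				unsigned type is always nonnegative.  */
  int truth = (a == b);      /* Truth values are 0 or 1, so nonnegative.  */
  int larger = a > b ? a : b; /* The analogue of MAX_EXPR: nonnegative
				 if either operand is.  */
  (void) widen; (void) truth; (void) larger;
}
#endif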
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work
	 correctly if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);
    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of the operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;
    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;
    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      || tree_expr_nonzero_p (TREE_OPERAND (t, 0)));

    default:
      break;
    }

  return false;
}
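/* An illustrative, standalone sketch -- not part of the original file --
   of why ADDR_EXPR of a weak declaration cannot be assumed nonzero.
   Kept out of the build with #if 0; `maybe_absent' is a hypothetical
   symbol name.  */
#if 0
extern int maybe_absent __attribute__ ((weak));
static int var;

static int nonzero_example (void)
{
  /* &var is known nonzero: `var' is an ordinary (non-weak) decl.
     &maybe_absent may compare equal to 0 when the weak symbol is not
     defined anywhere, so the fold above refuses to treat it as nonzero.  */
  return (&var != 0) + (&maybe_absent != 0);
}
#endif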
/* See if we are applying CODE, a relational, to the highest or lowest
   possible integer of TYPE.  If so, then the result is a compile
   time constant.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
		       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
	  || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	{
	  max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
	  min = 0;
	}
      else
	{
	  max = signed_max;
	  min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	}
      if (TREE_INT_CST_HIGH (op1) == 0
	  && TREE_INT_CST_LOW (op1) == max)
	switch (code)
	  {
	  case GT_EXPR:
	    return omit_one_operand (type, integer_zero_node, op0);

	  case GE_EXPR:
	    *code_p = EQ_EXPR;
	    break;

	  case LE_EXPR:
	    return omit_one_operand (type, integer_one_node, op0);

	  case LT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  /* The GE_EXPR and LT_EXPR cases above are not normally
	     reached because of previous transformations.  */

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == max - 1)
	switch (code)
	  {
	  case GT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;

	  case LE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min)
	switch (code)
	  {
	  case LT_EXPR:
	    return omit_one_operand (type, integer_zero_node, op0);

	  case LE_EXPR:
	    *code_p = EQ_EXPR;
	    break;

	  case GE_EXPR:
	    return omit_one_operand (type, integer_one_node, op0);

	  case GT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min + 1)
	switch (code)
	  {
	  case GE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;

	  case LT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == signed_max
	       && TYPE_UNSIGNED (TREE_TYPE (op1))
	       /* signed_type does not work on pointer types.  */
	       && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	{
	  /* The following case also applies to X < signed_max+1
	     and X >= signed_max+1 because of previous transformations.  */
	  if (code == LE_EXPR || code == GT_EXPR)
	    {
	      tree st0, st1, exp, retval;
	      st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
	      st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

	      exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			    type,
			    fold_convert (st0, op0),
			    fold_convert (st1, integer_zero_node));

	      retval
		= nondestructive_fold_binary_to_constant (TREE_CODE (exp),
							  TREE_TYPE (exp),
							  TREE_OPERAND (exp, 0),
							  TREE_OPERAND (exp, 1));

	      /* If we are in gimple form, then returning EXP would create
		 non-gimple expressions.  Clearing it is safe and ensures
		 we do not allow a non-gimple expression to escape.  */
	      if (in_gimple_form)
		exp = NULL;

	      return (retval ? retval : exp);
	    }
	}
    }

  return NULL_TREE;
}
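/* An illustrative, standalone sketch -- not part of the original file --
   of the source-level effect of fold_relational_hi_lo on comparisons
   against type extremes.  Kept out of the build with #if 0.  */
#if 0
#include <limits.h>

static int hi_lo_examples (unsigned char x, signed char y)
{
  int a = (x <= UCHAR_MAX);   /* X <= max: folds to 1.  */
  int b = (x > UCHAR_MAX);    /* X > max: folds to 0.  */
  int c = (y >= SCHAR_MIN);   /* X >= min: folds to 1.  */
  int d = (x >= UCHAR_MAX);   /* X >= max: rewritten to X == max.  */
  return a + b + c + d;
}
#endif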
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when at least one operand is a constant.
   As a result of those simplifying assumptions this routine is far
   simpler than the generic fold routine.  */

tree
nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
					tree op0, tree op1)
{
  tree tem, subop0, subop1;
  int wins = 1;

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  /* If either operand is a complex type, extract its real component.  */
  if (TREE_CODE (op0) == COMPLEX_CST)
    subop0 = TREE_REALPART (op0);
  else
    subop0 = op0;

  if (TREE_CODE (op1) == COMPLEX_CST)
    subop1 = TREE_REALPART (op1);
  else
    subop1 = op1;

  /* Note if either argument is not a real or integer constant.
     With a few exceptions, simplification is limited to cases
     where both arguments are constants.  */
  if ((TREE_CODE (subop0) != INTEGER_CST
       && TREE_CODE (subop0) != REAL_CST)
      || (TREE_CODE (subop1) != INTEGER_CST
	  && TREE_CODE (subop1) != REAL_CST))
    wins = 0;
  switch (code)
    {
    case PLUS_EXPR:
      /* (plus (address) (const_int)) is a constant.  */
      if (TREE_CODE (op0) == PLUS_EXPR
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
	      || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
		  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
		      == ADDR_EXPR)))
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
		       const_binop (PLUS_EXPR, op1,
				    TREE_OPERAND (op0, 1), 0));

    binary:
      if (!wins)
	return NULL_TREE;

      /* Both arguments are constants.  Simplify.  */
      tem = const_binop (code, op0, op1, 0);
      if (tem != NULL_TREE)
	{
	  /* The return value should always have the same type as
	     the original expression.  */
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);

	  return tem;
	}
      return NULL_TREE;
    case MINUS_EXPR:
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an
	 operand is volatile.  */
      if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
	return fold_convert (type, integer_zero_node);

      goto binary;

    case MULT_EXPR:
    case BIT_AND_EXPR:
      /* Special case multiplication or bitwise AND where one argument
	 is zero.  */
      if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
	return omit_one_operand (type, op1, op0);
      else
	if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
	    && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
	    && real_zerop (op1))
	  return omit_one_operand (type, op1, op0);

      goto binary;

    case BIT_IOR_EXPR:
      /* Special case when we know the result will be all ones.  */
      if (integer_all_onesp (op1))
	return omit_one_operand (type, op1, op0);

      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by zero is undefined.  */
      if (integer_zerop (op1))
	return NULL_TREE;

      if (TREE_CODE (op1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
	  && real_zerop (op1))
	return NULL_TREE;

      goto binary;

    case MIN_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, op1, op0);

      goto binary;

    case MAX_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, op1, op0);

      goto binary;
    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
	return omit_one_operand (type, op0, op1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
      if (integer_zerop (op0))
	return omit_one_operand (type, op0, op1);

      /* Since negative shift count is not well-defined, don't
	 try to compute it in the compiler.  */
      if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
	return NULL_TREE;

      goto binary;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* -1 rotated either direction by any amount is still -1.  */
      if (integer_all_onesp (op0))
	return omit_one_operand (type, op0, op1);

      /* 0 rotated either direction by any amount is still zero.  */
      if (integer_zerop (op0))
	return omit_one_operand (type, op0, op1);

      goto binary;

    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, op0, op1);
      return NULL_TREE;
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (op0) == INTEGER_CST
	   && TREE_CODE (op1) != INTEGER_CST)
	  || (TREE_CODE (op0) == REAL_CST
	      && TREE_CODE (op1) != REAL_CST))
	{
	  tem = op0;
	  op0 = op1;
	  op1 = tem;
	  code = swap_tree_comparison (code);
	}

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (op0) != INTEGER_CST
	  && tree_int_cst_sgn (op1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      code = GT_EXPR;
	      op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	      break;

	    case LT_EXPR:
	      code = LE_EXPR;
	      op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	      break;

	    default:
	      break;
	    }
	}

      tem = fold_relational_hi_lo (&code, type, &op0, &op1);
      if (tem)
	return tem;

      /* Fall through.  */
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      return fold_relational_const (code, type, op0, op1);
    case RANGE_EXPR:
      /* This could probably be handled.  */
      return NULL_TREE;

    case TRUTH_AND_EXPR:
      /* If the second arg is constant zero, the result is zero, but the
	 first arg must still be evaluated.  */
      if (integer_zerop (op1))
	return omit_one_operand (type, op1, op0);
      /* Likewise for the first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (op0))
	return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
	return constant_boolean_node (true, type);
      return NULL_TREE;

    case TRUTH_OR_EXPR:
      /* If the second arg is constant true, the result is true, but we must
	 still evaluate the first arg.  */
      if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
	return omit_one_operand (type, op1, op0);
      /* Likewise for the first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
	return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
	return constant_boolean_node (false, type);
      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
	{
	  int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
	  return constant_boolean_node (x, type);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
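/* An illustrative, standalone sketch -- not part of the original file --
   of source-level analogues of the binary folds above.  Kept out of the
   build with #if 0.  */
#if 0
static int binary_fold_examples (int x)
{
  int a = x * 0;   /* MULT_EXPR with a zero operand: folds to 0, since x
		      here has no side effects to preserve.  */
  int b = x | -1;  /* BIT_IOR_EXPR with all ones: folds to -1.  */
  int c = 2 + 3;   /* Both operands constant: const_binop yields 5.  */
  return a + b + c;
}
#endif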
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */

tree
nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
				       tree op0)
{
  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      if (TREE_CODE (op0) == COMPLEX_CST)
	subop = TREE_REALPART (op0);
      else
	subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
	return NULL_TREE;
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      return fold_convert_const (code, type, op0);

    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	return fold_negate_const (op0, type);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	return fold_abs_const (op0, type);
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST)
	return fold_not_const (op0, type);
      return NULL_TREE;

    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
	return TREE_REALPART (op0);
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
	return TREE_IMAGPART (op0);
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST
	  && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
	return build_complex (type, TREE_REALPART (op0),
			      negate_expr (TREE_IMAGPART (op0)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
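/* An illustrative, standalone sketch -- not part of the original file --
   of source-level analogues of the unary folds above.  Kept out of the
   build with #if 0.  */
#if 0
static double unary_fold_examples (void)
{
  int a = -(-5);         /* NEGATE_EXPR of INTEGER_CST: fold_negate_const.  */
  int b = ~0;            /* BIT_NOT_EXPR of INTEGER_CST: fold_not_const.  */
  double c = (double) 7; /* FLOAT_EXPR of INTEGER_CST: fold_convert_const.  */
  return a + b + c;
}
#endif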
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1
	     and its mode is QI.  Without the conversion,
	     (ARRAY + (INDEX - (unsigned char) 1)) becomes
	     ((ARRAY + (-(unsigned char) 1)) + INDEX), which becomes
	     (ARRAY + 255 + INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
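/* An illustrative, standalone sketch -- not part of the original file --
   of the reads this routine folds at compile time.  Kept out of the
   build with #if 0.  */
#if 0
static int string_read_examples (void)
{
  int a = "abc"[1];     /* ARRAY_REF of a STRING_CST: folds to 'b'.  */
  int b = *("xyz" + 2); /* INDIRECT_REF via pointer arithmetic:
			   folds to 'z'.  */
  return a + b;
}
#endif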
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
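/* An illustrative, standalone sketch -- not part of the original file --
   of the double-word one's complement used above: complementing the low
   and high words separately equals complementing the wide value as a
   whole.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void not_const_example (void)
{
  uint64_t v = 0x123456789abcdef0ULL;
  uint32_t low = (uint32_t) v, high = (uint32_t) (v >> 32);
  /* Reassemble the complement from the word-wise complements.  */
  uint64_t w = ((uint64_t) (uint32_t) ~high << 32) | (uint32_t) ~low;
  assert (w == ~v);
}
#endif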
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
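/* An illustrative, standalone sketch -- not part of the original file --
   of the NaN comparison results encoded above.  Kept out of the build
   with #if 0; requires C99 NAN.  */
#if 0
#include <math.h>

static int nan_compare_examples (void)
{
  double n = NAN;
  int eq = (n == n); /* EQ_EXPR with a NaN operand: folds to 0.  */
  int ne = (n != n); /* NE_EXPR: folds to 1.  */
  int lt = (n < 0.0); /* LT_EXPR: folds to 0, but only when
			 flag_trapping_math is off, since the comparison
			 would otherwise raise an invalid-operand trap.  */
  return eq + ne + lt;
}
#endif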
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base)
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR)
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
	return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
	return build4 (ARRAY_REF, type, op, size_zero_node,
		       NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node,
		     NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
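/* An illustrative, standalone sketch -- not part of the original file --
   of what stripping an ignored result amounts to at source level.  Kept
   out of the build with #if 0.  */
#if 0
static void ignored_result_example (int x, int *p)
{
  /* The value of this expression statement is discarded; only (*p)++
     has side effects, so the routine above reduces the whole tree to
     just the increment.  */
  (void) (x + (*p)++);
}
#endif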
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
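/* An illustrative, standalone sketch -- not part of the original file --
   of the power-of-two branch above, which computes
   (value + divisor - 1) & -divisor.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

static unsigned long round_up_example (unsigned long value,
				       unsigned long divisor)
{
  /* Requires divisor to be a power of two, i.e.
     divisor == (divisor & -divisor).  */
  return (value + divisor - 1) & -divisor;
}

static void round_up_check (void)
{
  assert (round_up_example (10, 8) == 16);
  assert (round_up_example (16, 8) == 16);
}
#endif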
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
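/* An illustrative, standalone sketch -- not part of the original file --
   of the power-of-two branch of round_down, which is simply
   value & -divisor.  Kept out of the build with #if 0.  */
#if 0
#include <assert.h>

static unsigned long round_down_example (unsigned long value,
					 unsigned long divisor)
{
  /* Requires divisor to be a power of two.  */
  return value & -divisor;
}

static void round_down_check (void)
{
  assert (round_down_example (10, 8) == 8);
  assert (round_down_example (16, 8) == 16);
}
#endif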
/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, &E1 - &E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitsize1, bitsize2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;
  enum machine_mode mode1, mode2;
  int unsignedp1, unsignedp2, volatilep1, volatilep2;

  core1 = get_inner_reference (e1, &bitsize1, &bitpos1, &toffset1, &mode1,
			       &unsignedp1, &volatilep1);
  core2 = get_inner_reference (e2, &bitsize2, &bitpos2, &toffset2, &mode2,
			       &unsignedp2, &volatilep2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
	return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;

  return true;
}
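/* An illustrative, standalone sketch -- not part of the original file --
   of the kind of address difference the routine above recognizes as
   constant.  Kept out of the build with #if 0.  */
#if 0
struct s { int a[8]; };

static long ptr_diff_example (struct s *p, int i)
{
  /* Same base object, constant offsets: &p->a[5] - &p->a[2] is the
     compile-time constant 3 (in units of int).  */
  long constant = &p->a[5] - &p->a[2];
  /* A variable offset on one side only cannot fold to a constant.  */
  long variable = &p->a[i] - &p->a[2];
  return constant + variable;
}
#endif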