/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
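/* Illustrative note (an editorial addition, not in the original file):
   the encoding uses one bit each for LT, EQ, GT and UNORD, so composite
   codes are unions of bits: COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ),
   and COMPCODE_NE == (COMPCODE_LTGT | COMPCODE_UNORD), since NE is also
   true on unordered (NaN) operands while LTGT is not.  A hypothetical
   self-check of these identities, kept under #if 0 so it cannot affect
   the build:  */
#if 0
#include <assert.h>
int
main (void)
{
  assert (COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ));
  assert (COMPCODE_GE == (COMPCODE_GT | COMPCODE_EQ));
  assert (COMPCODE_NE == (COMPCODE_LTGT | COMPCODE_UNORD));
  return 0;
}
#endif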
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
		    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
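/* Worked example (an editorial addition, for illustration): with
   a = INT_MAX and b = 1, the 2's complement sum wraps to INT_MIN.  The
   operands agree in sign but the sum does not, so both ~(a ^ b) and
   (a ^ sum) have the sign bit set, and the macro yields nonzero.  A
   hypothetical standalone check, assuming wrapping signed arithmetic
   (as with GCC's -fwrapv), kept under #if 0:  */
#if 0
#include <assert.h>
#include <limits.h>
int
main (void)
{
  int a = INT_MAX, b = 1;
  int sum = a + b;			/* wraps to INT_MIN under -fwrapv */
  assert (OVERFLOW_SUM_SIGN (a, b, sum));
  assert (!OVERFLOW_SUM_SIGN (1, 2, 3));	/* no overflow here */
  return 0;
}
#endif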
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
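/* Illustrative round trip (an editorial addition, not in the original
   source): on a host where HOST_WIDE_INT is 64 bits, BASE is 2**32 and
   encode splits the (LOW, HI) pair into four 32-bit digits, which decode
   reassembles.  A hypothetical check using the two routines above,
   kept under #if 0:  */
#if 0
#include <assert.h>
static void
example_encode_decode_round_trip (void)
{
  HOST_WIDE_INT words[4], hi;
  unsigned HOST_WIDE_INT low;

  encode (words, (unsigned HOST_WIDE_INT) 0x123456789abcdef0ULL,
	  (HOST_WIDE_INT) 0x0fedcba987654321LL);
  decode (words, &low, &hi);
  assert (low == 0x123456789abcdef0ULL);
  assert (hi == 0x0fedcba987654321LL);
}
#endif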
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
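/* Worked bound check (an editorial addition, not in the original): the
   digit loop above is a schoolbook multiply in base BASE.  For a 32-bit
   HOST_WIDE_INT the halfword digits are at most 0xFFFF, so one digit
   product is at most 0xFFFF * 0xFFFF == 0xFFFE0001; adding a carry of at
   most 0xFFFF gives 0xFFFF0000, and adding prod[k] <= 0xFFFF gives
   0xFFFFFFFF -- so the accumulation in CARRY never overflows the host
   word.  A hypothetical check of that arithmetic, kept under #if 0:  */
#if 0
#include <assert.h>
#include <stdint.h>
int
main (void)
{
  uint64_t digit_max = 0xFFFF;	/* largest base-2**16 digit */
  assert (digit_max * digit_max == 0xFFFE0001u);
  assert (digit_max * digit_max + digit_max == 0xFFFF0000u);
  assert (digit_max * digit_max + digit_max + digit_max == 0xFFFFFFFFu);
  return 0;
}
#endif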
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
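/* Implementation note (an editorial addition): both shift helpers split
   what is logically a single shift by (HOST_BITS_PER_WIDE_INT - count)
   into two shifts, e.g. "l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1".
   When COUNT is 0 the single shift would be by the full word width,
   which is undefined in C; the split form shifts by at most
   HOST_BITS_PER_WIDE_INT - 1 and then by 1, which is always defined and
   yields the required 0 contribution in the COUNT == 0 case.  */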
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
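/* Worked example of the rounding modes (an editorial addition): dividing
   -7 by 2 gives an exact ratio of -3.5, so
     TRUNC_DIV_EXPR -> -3 (toward zero),      remainder -1
     FLOOR_DIV_EXPR -> -4 (toward -infinity), remainder  1
     CEIL_DIV_EXPR  -> -3 (toward +infinity), remainder -1
     ROUND_DIV_EXPR -> -4 (to nearest, ties away from zero), remainder 1
   and in every mode num == quo * den + rem holds, which is what the
   final "compute true remainder" step above re-establishes.  */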
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
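/* Illustration (an editorial addition): in a signed type of precision P,
   the only value whose negation overflows is -2**(P-1), whose low P bits
   are exactly 1 << (P - 1).  E.g. for 8 bits, -(-128) is not
   representable, and the final test  val != (1 << 7)  above rejects
   precisely that one value.  */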
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
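/* Worked example of the RSHIFT_EXPR case above (an editorial addition):
   for a 32-bit int x, (x >> 31) is 0 for non-negative x and -1 for
   negative x (assuming GCC's arithmetic right shift of negative values),
   so -(x >> 31) is 0 or 1 -- exactly ((unsigned) x) >> 31, which needs
   no negation.  A hypothetical standalone check, kept under #if 0:  */
#if 0
#include <assert.h>
static void
example_negate_sign_shift (int x)
{
  assert (-(x >> 31) == (int) ((unsigned) x >> 31));
}
#endif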
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
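/* Worked examples (an editorial addition, for illustration):
     split_tree (x + 5, PLUS_EXPR, ...)  ->  var = x, *litp = 5
     split_tree (x - 5, PLUS_EXPR, ...)  ->  var = x, *minus_litp = 5
     split_tree (x * y, PLUS_EXPR, ...)  ->  var = x * y (no decomposition)
   Callers re-combine the parts with associate_trees below; this is how
   fold can gather the literal parts of a chain like (a + 1) + (b + 2)
   into a single constant.  */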
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */

      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
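/* Illustration of the saturating semantics above (an editorial addition):
   converting the double 1e30 to a 32-bit signed type yields INT_MAX,
   converting -1e30 yields INT_MIN, and converting a NaN yields 0 -- in
   each case with the overflow flag raised.  Leaving the HOST_WIDE_INT
   pairs aside, the behavior mirrors this hypothetical plain-C sketch,
   kept under #if 0:  */
#if 0
#include <limits.h>
#include <math.h>
static int
saturating_double_to_int (double x)
{
  if (isnan (x))
    return 0;			/* NaN maps to zero */
  if (x < (double) INT_MIN)
    return INT_MIN;		/* saturate below */
  if (x > (double) INT_MAX)
    return INT_MAX;		/* saturate above */
  return (int) x;
}
#endif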
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			      type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (NOP_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
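/* Worked example (an editorial addition): for (x < y) || (x == y) we
   get lcompcode == COMPCODE_LT (1) and rcompcode == COMPCODE_EQ (2);
   the TRUTH_ORIF_EXPR case ORs them to 3 == COMPCODE_LE, so the pair
   folds to the single comparison x <= y.  Dually, (x <= y) && (x >= y)
   ANDs COMPCODE_LE (3) with COMPCODE_GE (6) to COMPCODE_EQ (2),
   i.e. x == y.  */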
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  */
          return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! OP_SAME (0))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
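/* Illustrative sketch, not part of GCC: why operand_equal_p tests for
   indistinguishability rather than C's == on constants.  At run time
   -0.0 == 0.0 holds, yet the two zeros are still distinguishable (here
   through IEEE division), so the folder must not treat them as the
   same operand.  */
static int demo_signed_zero (void)
{
  double pz = 0.0, nz = -0.0;
  /* Equal under C's ==, yet 1.0/pz is +inf while 1.0/nz is -inf.  */
  return pz == nz && (1.0 / pz) != (1.0 / nz);
}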
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold_build3 (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
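/* Illustrative sketch, not part of GCC: the source-level effect of
   omit_one_operand.  "g (x) * 0" folds to 0, but an omitted operand with
   side effects must still be evaluated, so the folder emits the
   equivalent of a comma expression.  The function g is a hypothetical
   stand-in for any side-effecting call.  */
static int g (int x);           /* hypothetical side-effecting call */
static int demo_omit (int x)
{
  return g (x) * 0;             /* folded to the equivalent of (g (x), 0) */
}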
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
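/* Illustrative sketch, not part of GCC: invert_truthvalue applies the
   usual De Morgan rules at the tree level.  The two functions below are
   equivalent; negating the AND yields the OR of the negated operands.  */
static int demo_not_and (int a, int b)
{
  return !(a && b);
}
static int demo_de_morgan (int a, int b)
{
  return !a || !b;              /* TRUTH_AND_EXPR inverted operand-wise */
}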
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
}
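/* Illustrative sketch, not part of GCC: the distribution performed by
   distribute_bit_expr.  Both functions compute the same value; the
   second form needs one fewer bitwise operation and folds further when
   b and c are constants.  */
static unsigned demo_undistributed (unsigned a, unsigned b, unsigned c)
{
  return (a | b) & (a | c);
}
static unsigned demo_distributed (unsigned a, unsigned b, unsigned c)
{
  return a | (b & c);           /* same bits, by the distributive law */
}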
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
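/* Illustrative sketch, not part of GCC: the shape of the rewrite done by
   optimize_bit_field_compare for a constant comparison.  The struct and
   its layout are hypothetical; the point is that the bit-field
   extraction (load, shift, mask) is replaced by masking a whole word
   and comparing against the constant pre-shifted into position.  */
struct demo_bits { unsigned f : 3; unsigned rest : 29; };
static int demo_bitfield_eq (struct demo_bits *p)
{
  return p->f == 5;     /* becomes roughly: (word & mask) == (5 << shift) */
}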
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of type TYPE.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
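/* Illustrative sketch, not part of GCC: the folding that sign_bit_p
   enables.  For a 32-bit int the mask below is exactly the sign bit, so
   the bit test is equivalent to a sign comparison and can be folded to
   "x < 0".  */
static int demo_sign_bit (int x)
{
  return (x & 0x80000000u) != 0;        /* same as x < 0 on 32-bit int */
}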
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X < 2 || X >= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
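/* Illustrative sketch, not part of GCC: the range-test transformation
   described above, written out at the source level.  The unsigned
   subtraction wraps for values below 2, so the single comparison covers
   exactly the set {2, 3, 4, 5}.  */
static int demo_or_chain (unsigned x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}
static int demo_range_test (unsigned x)
{
  return x - 2 <= 3;            /* + [2, 5] as a single comparison */
}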
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
                                             fold_convert (arg0_type,
                                                           high_positive),
                                             fold_convert (arg0_type,
                                                           integer_one_node));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp, high);

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp, low);

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp, low);

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold_build2 (MINUS_EXPR, etype, exp, low),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
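/* Illustrative sketch, not part of GCC: the special case noted above.
   When the range is [1, signed-max], membership reduces to a single
   signed comparison against zero, since values above the signed maximum
   become negative after the cast.  */
static int demo_range_1_127 (unsigned char c)
{
  return c >= 1 && c <= 127;    /* folded to: (signed char) c > 0 */
}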
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
4158 /* Subroutine of fold, looking inside expressions of the form
4159 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4160 of the COND_EXPR. This function is being used also to optimize
4161 A op B ? C : A, by reversing the comparison first.
4163 Return a folded expression whose code is not a COND_EXPR
4164 anymore, or NULL_TREE if no folding opportunity is found. */
4167 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4169 enum tree_code comp_code
= TREE_CODE (arg0
);
4170 tree arg00
= TREE_OPERAND (arg0
, 0);
4171 tree arg01
= TREE_OPERAND (arg0
, 1);
4172 tree arg1_type
= TREE_TYPE (arg1
);
4178 /* If we have A op 0 ? A : -A, consider applying the following
4181 A == 0? A : -A same as -A
4182 A != 0? A : -A same as A
4183 A >= 0? A : -A same as abs (A)
4184 A > 0? A : -A same as abs (A)
4185 A <= 0? A : -A same as -abs (A)
4186 A < 0? A : -A same as -abs (A)
4188 None of these transformations work for modes with signed
4189 zeros. If A is +/-0, the first two transformations will
4190 change the sign of the result (from +0 to -0, or vice
4191 versa). The last four will fix the sign of the result,
4192 even though the original expressions could be positive or
4193 negative, depending on the sign of A.
4195 Note that all these transformations are correct if A is
4196 NaN, since the two alternatives (A and -A) are also NaNs. */
4197 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4198 ? real_zerop (arg01
)
4199 : integer_zerop (arg01
))
4200 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4201 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4202 /* In the case that A is of the form X-Y, '-A' (arg2) may
4203 have already been folded to Y-X, check for that. */
4204 || (TREE_CODE (arg1
) == MINUS_EXPR
4205 && TREE_CODE (arg2
) == MINUS_EXPR
4206 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4207 TREE_OPERAND (arg2
, 1), 0)
4208 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4209 TREE_OPERAND (arg2
, 0), 0))))
4214 tem
= fold_convert (arg1_type
, arg1
);
4215 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4218 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4221 if (flag_trapping_math
)
4226 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4227 arg1
= fold_convert (lang_hooks
.types
.signed_type
4228 (TREE_TYPE (arg1
)), arg1
);
4229 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4230 return pedantic_non_lvalue (fold_convert (type
, tem
));
4233 if (flag_trapping_math
)
4237 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4238 arg1
= fold_convert (lang_hooks
.types
.signed_type
4239 (TREE_TYPE (arg1
)), arg1
);
4240 tem
= fold_build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4241 return negate_expr (fold_convert (type
, tem
));
4243 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4247 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4248 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4249 both transformations are correct when A is NaN: A != 0
4250 is then true, and A == 0 is false. */
4252 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4254 if (comp_code
== NE_EXPR
)
4255 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4256 else if (comp_code
== EQ_EXPR
)
4257 return fold_convert (type
, integer_zero_node
);
4260 /* Try some transformations of A op B ? A : B.
4262 A == B? A : B same as B
4263 A != B? A : B same as A
4264 A >= B? A : B same as max (A, B)
4265 A > B? A : B same as max (B, A)
4266 A <= B? A : B same as min (A, B)
4267 A < B? A : B same as min (B, A)
4269 As above, these transformations don't work in the presence
4270 of signed zeros. For example, if A and B are zeros of
4271 opposite sign, the first two transformations will change
4272 the sign of the result. In the last four, the original
4273 expressions give different results for (A=+0, B=-0) and
4274 (A=-0, B=+0), but the transformed expressions do not.
4276 The first two transformations are correct if either A or B
4277 is a NaN. In the first transformation, the condition will
4278 be false, and B will indeed be chosen. In the case of the
4279 second transformation, the condition A != B will be true,
4280 and A will be chosen.
4282 The conversions to max() and min() are not correct if B is
4283 a number and A is not. The conditions in the original
4284 expressions will be false, so all four give B. The min()
4285 and max() versions would give a NaN instead. */
      if (operand_equal_for_comparison_p (arg01, arg2, arg00)
	  /* Avoid these transformations if the COND_EXPR may be used
	     as an lvalue in the C++ front-end.  PR c++/19199.  */
	  && (in_gimple_form
	      || strcmp (lang_hooks.name, "GNU C++") != 0
	      || ! maybe_lvalue_p (arg1)
	      || ! maybe_lvalue_p (arg2)))
	{
	  tree comp_op0 = arg00;
	  tree comp_op1 = arg01;
	  tree comp_type = TREE_TYPE (comp_op0);

	  /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
	  if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	    {
	      comp_type = type;
	      comp_op0 = arg1;
	      comp_op1 = arg2;
	    }

	  switch (comp_code)
	    {
	    case EQ_EXPR:
	      return pedantic_non_lvalue (fold_convert (type, arg2));
	    case NE_EXPR:
	      return pedantic_non_lvalue (fold_convert (type, arg1));
	    case LE_EXPR:
	    case LT_EXPR:
	    case UNLE_EXPR:
	    case UNLT_EXPR:
	      /* In C++ a ?: expression can be an lvalue, so put the
		 operand which will be used if they are equal first
		 so that we can convert this back to the
		 corresponding COND_EXPR.  */
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  comp_op0 = fold_convert (comp_type, comp_op0);
		  comp_op1 = fold_convert (comp_type, comp_op1);
		  tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
			? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
			: fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
		  return pedantic_non_lvalue (fold_convert (type, tem));
		}
	      break;
	    case GE_EXPR:
	    case GT_EXPR:
	    case UNGE_EXPR:
	    case UNGT_EXPR:
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  comp_op0 = fold_convert (comp_type, comp_op0);
		  comp_op1 = fold_convert (comp_type, comp_op1);
		  tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
			? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
			: fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
		  return pedantic_non_lvalue (fold_convert (type, tem));
		}
	      break;
	    case UNEQ_EXPR:
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		return pedantic_non_lvalue (fold_convert (type, arg2));
	      break;
	    case LTGT_EXPR:
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      break;
	    default:
	      gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	      break;
	    }
	}
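
      /* For illustration (an editorial sketch): absent NaNs and signed
	 zeros, the switch above yields, e.g.:

	    a == b ? a : b  -->  b
	    a != b ? a : b  -->  a
	    a <= b ? a : b  -->  MIN_EXPR <a, b>
	    a >= b ? a : b  -->  MAX_EXPR <a, b>  */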
      /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
	 we might still be able to simplify this.  For example,
	 if C1 is one less or one more than C2, this might have started
	 out as a MIN or MAX and been transformed by this function.
	 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg01) == INTEGER_CST
	  && TREE_CODE (arg2) == INTEGER_CST)
	switch (comp_code)
	  {
	  case EQ_EXPR:
	    /* We can replace A with C1 in this case.  */
	    arg1 = fold_convert (type, arg01);
	    return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

	  case LT_EXPR:
	    /* If C1 is C2 + 1, this is min(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (PLUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						       type, arg1, arg2));
	    break;

	  case LE_EXPR:
	    /* If C1 is C2 - 1, this is min(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (MINUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						       type, arg1, arg2));
	    break;

	  case GT_EXPR:
	    /* If C1 is C2 - 1, this is max(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (MINUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						       type, arg1, arg2));
	    break;

	  case GE_EXPR:
	    /* If C1 is C2 + 1, this is max(A, C2).  */
	    if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
				   OEP_ONLY_CONST)
		&& operand_equal_p (arg01,
				    const_binop (PLUS_EXPR, arg2,
						 integer_one_node, 0),
				    OEP_ONLY_CONST))
	      return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						       type, arg1, arg2));
	    break;

	  case NE_EXPR:
	    break;

	  default:
	    gcc_unreachable ();
	  }

      return NULL_TREE;
    }
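
/* For illustration of the INTEGER_CST cases above (editorial sketch):

     x < 6 ? x : 5  -->  MIN_EXPR <x, 5>   (C1 == C2 + 1)
     x > 4 ? x : 5  -->  MAX_EXPR <x, 5>   (C1 == C2 - 1)  */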
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
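
/* For illustration (editorial sketch): with op0 and op1 being the two
   halves of "ch >= '0' && ch <= '9'", merge_ranges produces the single
   range ['0', '9'], and build_range_check expresses it as a single
   unsigned comparison, roughly (unsigned char) (ch - '0') <= 9 at the
   source level.  */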
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
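
/* For illustration (editorial sketch): take an 8-bit mode, p == 4 and
   c == 0xfe, the sign-extension of the 4-bit value 0b1110.  The shifts
   above produce temp == 0xf0, and c ^ temp == 0x0e, i.e. the extra
   bits become zero exactly because c was sign-extended.  Starting from
   c == 0x0e instead yields 0xfe, whose nonzero extra bits let callers
   detect the mismatch.  */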
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	return build2 (code, truth_type, lhs, rhs);
    }
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;
  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;
  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);
      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}
      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning ("%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
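
/* For illustration (editorial sketch): given adjacent bit-fields,
   fold_truthop can turn p->a == 2 && p->b == 4 into a single load of
   the word containing both fields, one mask, and one comparison,
   roughly (word & MASK) == MERGED_CONST at the source level.  */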
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	/* FIXME: We should be able to invert code without building a
	   scratch tree node, but doing so would require us to
	   duplicate a part of invert_truthvalue here.  */
	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
	tem = optimize_minmax_comparison (TREE_CODE (tem),
					  TREE_TYPE (tem),
					  TREE_OPERAND (tem, 0),
					  TREE_OPERAND (tem, 1));
	return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
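
/* For illustration (editorial sketch; constants other than zero work
   the same way):

     MAX_EXPR <x, 0> == 5  -->  x == 5      (consts_lt case)
     MAX_EXPR <x, 0> > 5   -->  x > 5
     MIN_EXPR <x, 0> > 5   -->  false  */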
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;
    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;
    case TRUNC_MOD_EXPR:  case CEIL_MOD_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either
	 an operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
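
/* For illustration (editorial sketch): for (x * 8 + y * 16) / 4 the
   PLUS_EXPR case recurses into both multiplications, each MULT_EXPR
   cancels against the divisor in the DIV case above, and the result
   is rebuilt as x * 2 + y * 4 with no division left.  */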
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* We have to be careful with stripping nops as with the
     base type the meaning of the offset can change.  */
  tree inner_expr = expr;
  STRIP_NOPS (inner_expr);
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  *base = TREE_OPERAND (expr, 0);
	  *offset = TREE_OPERAND (expr, 1);
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (inner_expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (inner_expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
	  *offset = TREE_OPERAND (op0, 1);
	}
      else
	{
	  *base = inner_expr;
	  *offset = NULL_TREE;
	}
      return true;
    }

  return false;
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
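
/* For illustration (editorial sketch), with a constant arg:

     4 + (b ? 1 : 2)  -->  b ? 5 : 6

   so the addition folds away in both arms.  */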
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
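
/* For illustration (editorial sketch): under round-to-nearest,
   (-0.0) + 0.0 is +0.0, so "x + 0.0 == x" fails for x == -0.0 when
   signed zeros are honored, whereas (-0.0) - 0.0 stays -0.0.  That is
   why only the NEGATE case can return true here.  */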
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
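
/* For illustration (editorial sketch): sqrt(x) > 2.0 becomes x > 4.0
   when 4.0 is representable in the mode, and, when NaNs matter,
   sqrt(x) > -1.0 becomes x >= 0.0, matching the cases above.  */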
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
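
/* For illustration (editorial sketch), for IEEE double this folds:

     x <  +Inf  -->  x <= DBL_MAX
     x >= +Inf  -->  x >  DBL_MAX  */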
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			      result_type, fold_convert (stype, arg00),
			      fold_convert (stype, integer_zero_node));
	}

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
			     inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
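
/* For illustration (editorial sketch): (x & 8) != 0 becomes roughly
   ((unsigned) x >> 3) & 1 at the source level, while (x & SIGNBIT) != 0
   becomes (signed) x < 0 via the sign_bit_p shortcut above.  */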
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);
  if (!arg1_unw)
    return NULL_TREE;

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && TREE_CODE (shorter_type) == INTEGER_TYPE
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
			fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST)
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
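
/* Illustrative example (not part of the original sources): with
   "unsigned char c", the comparison

       (int) c == 100     folds to     c == (unsigned char) 100

   because 100 fits in the shorter type, while

       (int) c == 1000    folds to     0

   because 1000 lies outside the range [0, 255] of the shorter type,
   so the result is known without examining c (omit_one_operand still
   preserves any side effects of the operand).  */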
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
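
/* Illustrative example (not part of the original sources): with
   "unsigned int u", the comparison

       (int) u == 5       folds to     u == 5U

   since the cast changes only the signedness, not the precision, the
   conversion can be stripped for equality comparisons and the
   constant reinterpreted in the inner type.  */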
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   the step of the array.  ADDR is the address, MULT the multiplicative
   expression.  If the function succeeds, the new address expression is
   returned.  Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
{
  tree s, delta, step;
  tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize MULT into an INTEGER_CST step S and a possibly
     non-constant DELTA.  */
  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      s = arg0;
      delta = arg1;
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      s = arg1;
      delta = arg0;
    }
  else
    return NULL_TREE;

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  step = array_ref_element_size (ref);

	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  itype = TREE_TYPE (step);

	  /* If the type sizes do not match, we might run into problems
	     when one of them would overflow.  */
	  if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
	    continue;

	  if (!operand_equal_p (step, fold_convert (itype, s), 0))
	    continue;

	  delta = fold_convert (itype, delta);
	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       TREE_OPERAND (pos, 1),
				       delta);

  return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
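
/* Illustrative example (not part of the original sources): with
   "int a[100];" the element size (step) is 4, so the address
   computation

       &a[i] + 4 * d      becomes      &a[i + d]

   and likewise &a[i] - 4 * d becomes &a[i - d] when CODE is
   MINUS_EXPR.  */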
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
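
/* Illustrative example (not part of the original sources): in a
   condition such as

       a < x && a + 1 > y

   the second inequality is rewritten to a >= y, giving

       a < x && a >= y

   which is valid because the first conjunct guarantees a < x <= MAX,
   so a + 1 cannot wrap around.  */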
/* Fold complex addition when both components are accessible by parts.
   Return non-null if successful.  CODE should be PLUS_EXPR for addition,
   or MINUS_EXPR for subtraction.  */

static tree
fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
{
  tree ar, ai, br, bi, rr, ri, inner_type;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL;

  inner_type = TREE_TYPE (type);

  rr = fold_build2 (code, inner_type, ar, br);
  ri = fold_build2 (code, inner_type, ai, bi);

  return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
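
/* Illustrative example (not part of the original sources): complex
   addition and subtraction fold componentwise, so with
   a = ar + ai*I and b = br + bi*I,

       a + b    becomes    (ar + br) + (ai + bi)*I
       a - b    becomes    (ar - br) + (ai - bi)*I

   whenever both real and imaginary parts are directly accessible
   (COMPLEX_EXPR or COMPLEX_CST operands).  */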
/* Perform some simplifications of complex multiplication when one or more
   of the components are constants or zeros.  Return non-null if successful.  */

static tree
fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
{
  tree rr, ri, inner_type, zero;
  bool ar0, ai0, br0, bi0, bi1;

  inner_type = TREE_TYPE (type);
  zero = NULL;

  if (SCALAR_FLOAT_TYPE_P (inner_type))
    {
      ar0 = ai0 = br0 = bi0 = bi1 = false;

      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */

      if (TREE_CODE (ar) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
	ar0 = true, zero = ar;

      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
	ai0 = true, zero = ai;

      if (TREE_CODE (br) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
	br0 = true, zero = br;

      if (TREE_CODE (bi) == REAL_CST)
	{
	  if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
	    bi0 = true, zero = bi;
	  else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
	    bi1 = true;
	}
    }
  else
    {
      ar0 = integer_zerop (ar);
      if (ar0)
	zero = ar;
      ai0 = integer_zerop (ai);
      if (ai0)
	zero = ai;
      br0 = integer_zerop (br);
      if (br0)
	zero = br;
      bi0 = integer_zerop (bi);
      if (bi0)
	{
	  zero = bi;
	  bi1 = false;
	}
      else
	bi1 = integer_onep (bi);
    }

  /* We won't optimize anything below unless something is zero.  */
  if (zero == NULL)
    return NULL;

  if (ai0 && br0 && bi1)
    {
      rr = zero;
      ri = ar;
    }
  else if (ai0 && bi0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
      ri = zero;
    }
  else if (ai0 && br0)
    {
      rr = zero;
      ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
    }
  else if (ar0 && bi0)
    {
      rr = zero;
      ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
    }
  else if (ar0 && br0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
      rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
      ri = zero;
    }
  else if (bi0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
      ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
    }
  else if (ai0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
      ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
    }
  else if (br0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
      rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
      ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
    }
  else if (ar0)
    {
      rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
      rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
      ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
    }
  else
    return NULL;

  return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
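
/* Illustrative example (not part of the original sources): when both
   imaginary parts are known to be zero (ai0 && bi0), the complex
   product collapses to a single real multiplication:

       (ar + 0*I) * (br + 0*I)    becomes    (ar * br) + 0*I

   and when both real parts are zero (ar0 && br0), it becomes
   -(ai * bi) + 0*I, since I*I == -1.  */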
static tree
fold_complex_mult (tree type, tree ac, tree bc)
{
  tree ar, ai, br, bi;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL;

  return fold_complex_mult_parts (type, ar, ai, br, bi);
}
/* Perform some simplifications of complex division when one or more of
   the components are constants or zeros.  Return non-null if successful.  */

static tree
fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
			enum tree_code code)
{
  tree rr, ri, inner_type, zero;
  bool ar0, ai0, br0, bi0, bi1;

  inner_type = TREE_TYPE (type);
  zero = NULL;

  if (SCALAR_FLOAT_TYPE_P (inner_type))
    {
      ar0 = ai0 = br0 = bi0 = bi1 = false;

      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */

      if (TREE_CODE (ar) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
	ar0 = true, zero = ar;

      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
	ai0 = true, zero = ai;

      if (TREE_CODE (br) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
	br0 = true, zero = br;

      if (TREE_CODE (bi) == REAL_CST)
	{
	  if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
	    bi0 = true, zero = bi;
	  else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
	    bi1 = true;
	}
    }
  else
    {
      ar0 = integer_zerop (ar);
      if (ar0)
	zero = ar;
      ai0 = integer_zerop (ai);
      if (ai0)
	zero = ai;
      br0 = integer_zerop (br);
      if (br0)
	zero = br;
      bi0 = integer_zerop (bi);
      if (bi0)
	{
	  zero = bi;
	  bi1 = false;
	}
      else
	bi1 = integer_onep (bi);
    }

  /* We won't optimize anything below unless something is zero.  */
  if (zero == NULL)
    return NULL;

  if (ai0 && bi0)
    {
      rr = fold_build2 (code, inner_type, ar, br);
      ri = zero;
    }
  else if (ai0 && br0)
    {
      rr = zero;
      ri = fold_build2 (code, inner_type, ar, bi);
      ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
    }
  else if (ar0 && bi0)
    {
      rr = zero;
      ri = fold_build2 (code, inner_type, ai, br);
    }
  else if (ar0 && br0)
    {
      rr = fold_build2 (code, inner_type, ai, bi);
      ri = zero;
    }
  else if (bi0)
    {
      rr = fold_build2 (code, inner_type, ar, br);
      ri = fold_build2 (code, inner_type, ai, br);
    }
  else if (br0)
    {
      rr = fold_build2 (code, inner_type, ai, bi);
      ri = fold_build2 (code, inner_type, ar, bi);
      ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
    }
  else
    return NULL;

  return fold_build2 (COMPLEX_EXPR, type, rr, ri);
}
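
/* Illustrative example (not part of the original sources): when the
   divisor is purely real (bi0), the division distributes over the
   components:

       (ar + ai*I) / (br + 0*I)    becomes    (ar/br) + (ai/br)*I

   so no full complex division sequence is needed.  */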
static tree
fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
{
  tree ar, ai, br, bi;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL;

  return fold_complex_div_parts (type, ar, ai, br, bi, code);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operand.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1 (code, type, arg01);
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1 (code, type, arg02);
	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			     arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1 (code, type,
			  build3 (COND_EXPR,
				  TREE_TYPE (TREE_OPERAND
					     (TREE_OPERAND (tem, 1), 0)),
				  TREE_OPERAND (tem, 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold_build3 (COND_EXPR, type, arg0,
				fold_build1 (code, type,
					     integer_one_node),
				fold_build1 (code, type,
					     integer_zero_node));
	}
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
	  || TREE_CODE (op0) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and = op0;
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
					TREE_INT_CST_HIGH (and1));
	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
				    TREE_CONSTANT_OVERFLOW (and1));
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, and0), tem);
	    }
	}

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
	  && BINARY_CLASS_P (arg0)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = type;
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return NULL_TREE;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return fold_convert (type, negate_expr (arg0));
      /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (type, 1));
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold_build1 (ABS_EXPR,
						    TREE_TYPE (targ0),
						    targ0));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build2 (COMPLEX_EXPR, type,
		       TREE_OPERAND (arg0, 0),
		       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build1 (CONJ_EXPR, type,
					 TREE_OPERAND (arg0, 0)),
			    fold_build1 (CONJ_EXPR, type,
					 TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build1 (REALPART_EXPR, type,
					 TREE_OPERAND (arg0, 0)),
			    fold_build1 (REALPART_EXPR, type,
					 TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build1 (IMAGPART_EXPR, type,
					 TREE_OPERAND (arg0, 0)),
			    fold_build1 (IMAGPART_EXPR, type,
					 TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
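
/* Illustrative examples (not part of the original sources) of the
   unary folds above, at the source level:

       -(~a)        becomes    a + 1     (NEGATE_EXPR case)
       ~(-a)        becomes    a - 1     (BIT_NOT_EXPR case)
       ~(a - 1)     becomes    -a        (BIT_NOT_EXPR case)
       fabs ((double) f)  becomes  (double) fabsf (f)   (ABS_EXPR case)

   for integral a and float f.  */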
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2);

  arg0 = op0;
  arg1 = op1;

  if (arg0)
    {
      tree subop;

      /* Strip any conversions that don't change the mode.  This is
	 safe for every expression, except for a comparison expression
	 because its signedness is derived from its operands.  So, in
	 the latter case, only strip conversions that don't change the
	 signedness.

	 Note that this is done as an internal manipulation within the
	 constant folder, in order to find the simplest representation
	 of the arguments so that their form can be studied.  In any
	 case, the appropriate type conversions should be put back in
	 the tree that will get out of the constant folder.  */
      if (kind == tcc_comparison)
	STRIP_SIGN_NOPS (arg0);
      else
	STRIP_NOPS (arg0);

      if (TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }

  if (arg1)
    {
      tree subop;

      /* Strip conversions exactly as for ARG0 above.  */
      if (kind == tcc_comparison)
	STRIP_SIGN_NOPS (arg1);
      else
	STRIP_NOPS (arg1);

      if (TREE_CODE (arg1) == COMPLEX_CST)
	subop = TREE_REALPART (arg1);
      else
	subop = arg1;

      if (TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 boolean_type_node,
			 fold_convert (boolean_type_node, arg0),
			 fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		   fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
  else if (TREE_CODE_CLASS (code) == tcc_comparison
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		   fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
  else if (TREE_CODE_CLASS (code) == tcc_binary
	   || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build2 (code, type, TREE_OPERAND (arg0, 1),
				    arg1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold_build2 (code, type,
				    arg0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2 (MINUS_EXPR, type,
			    fold_convert (type, arg1),
			    fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2 (pcode, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg)),
				    fold_convert (type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold_build2 (PLUS_EXPR, type,
				    fold_convert (type, parg0),
				    fold_build2 (pcode, type,
						 fold_convert (type, marg),
						 fold_convert (type,
							       parg1)));
	    }

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold_build2 (MULT_EXPR, type, arg00,
					  build_int_cst (NULL_TREE,
							 int01 / int11));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold_build2 (MULT_EXPR, type,
				    fold_build2 (PLUS_EXPR, type,
						 fold_convert (type, alt0),
						 fold_convert (type, alt1)),
				    fold_convert (type, same));
	    }

	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
	     of the array.  The loop optimizer sometimes produces this type
	     of expression.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
	      if (tem)
		return fold_convert (type, fold (tem));
	    }
	  else if (TREE_CODE (arg1) == ADDR_EXPR
		   && TREE_CODE (arg0) == MULT_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
	      if (tem)
		return fold_convert (type, fold (tem));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2 (MINUS_EXPR, type,
				    fold_convert (type, arg0),
				    fold_convert (type, tem));
	    }

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2 (MULT_EXPR, type, arg0,
				build_real (type, dconst2));

	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold_build2 (MULT_EXPR, type, arg1,
				  build_real (type, c));
	    }

	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold_build2 (MULT_EXPR, type, arg0,
				  build_real (type, c));
	    }

	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold_build2 (MULT_EXPR, type,
				  TREE_OPERAND (arg0, 0),
				  build_real (type, c1));
	    }

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
		  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
		  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }

    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != type)
	    t1 = fold_convert (type, t1);

	  return t1;
	}
      return NULL_TREE;
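
      /* Illustrative example (not part of the original sources): the
	 reassociation logic above lets constants and literals combine
	 across parentheses for non-float types, e.g.

	     (x + 1) + 2    becomes    x + 3

	 via split_tree/associate_trees, and the rotate recognizer
	 turns

	     (x << 3) + (x >> 29)    [x unsigned, 32 bits wide]

	 into a single LROTATE_EXPR of x by 3.  */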

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1 (BIT_NOT_EXPR, type, arg1);

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold_build2 (BIT_AND_EXPR, type,
				    fold_build1 (BIT_NOT_EXPR, type,
						 TREE_OPERAND (arg1, 0)),
				    arg0);
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold_build2 (BIT_AND_EXPR, type,
				    fold_build1 (BIT_NOT_EXPR, type,
						 TREE_OPERAND (arg1, 1)),
				    arg0);
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2 (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR)
	{
	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
	  if (tem)
	    return fold_convert (type, fold (tem));
	}

      if (TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  /* (A * C) - (B * C) -> (A-B) * C.  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
			       TREE_OPERAND (arg1, 1), 0))
	    return fold_build2 (MULT_EXPR, type,
				fold_build2 (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg0, 1));
	  /* (A * C1) - (A * C2) -> A * (C1-C2).  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0))
	    return fold_build2 (MULT_EXPR, type,
				TREE_OPERAND (arg0, 0),
				fold_build2 (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1)));
	}

      goto associate;
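
      /* Illustrative example (not part of the original sources): for
	 integral operands the MINUS_EXPR folds above give, at the
	 source level,

	     a - (a & b)    becomes    ~b & a
	     -a - 1         becomes    ~a
	     -1 - a         becomes    ~a

	 all of which expose simpler bitwise forms to later passes.  */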
7841 /* (-A) * (-B) -> A * B */
7842 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
7843 return fold_build2 (MULT_EXPR
, type
,
7844 TREE_OPERAND (arg0
, 0),
7845 negate_expr (arg1
));
7846 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
7847 return fold_build2 (MULT_EXPR
, type
,
7849 TREE_OPERAND (arg1
, 0));
7851 if (TREE_CODE (type
) == COMPLEX_TYPE
)
7853 tem
= fold_complex_mult (type
, arg0
, arg1
);
7858 if (! FLOAT_TYPE_P (type
))
7860 if (integer_zerop (arg1
))
7861 return omit_one_operand (type
, arg1
, arg0
);
7862 if (integer_onep (arg1
))
7863 return non_lvalue (fold_convert (type
, arg0
));
7864 /* Transform x * -1 into -x. */
7865 if (integer_all_onesp (arg1
))
7866 return fold_convert (type
, negate_expr (arg0
));
7868 /* (a * (1 << b)) is (a << b) */
7869 if (TREE_CODE (arg1
) == LSHIFT_EXPR
7870 && integer_onep (TREE_OPERAND (arg1
, 0)))
7871 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
7872 TREE_OPERAND (arg1
, 1));
7873 if (TREE_CODE (arg0
) == LSHIFT_EXPR
7874 && integer_onep (TREE_OPERAND (arg0
, 0)))
7875 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
7876 TREE_OPERAND (arg0
, 1));
7878 if (TREE_CODE (arg1
) == INTEGER_CST
7879 && 0 != (tem
= extract_muldiv (op0
,
7880 fold_convert (type
, arg1
),
7882 return fold_convert (type
, tem
);
7887 /* Maybe fold x * 0 to 0. The expressions aren't the same
7888 when x is NaN, since x * 0 is also NaN. Nor are they the
7889 same in modes with signed zeros, since multiplying a
7890 negative value by 0 gives -0, not +0. */
7891 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
7892 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
7893 && real_zerop (arg1
))
7894 return omit_one_operand (type
, arg1
, arg0
);
7895 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7896 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7897 && real_onep (arg1
))
7898 return non_lvalue (fold_convert (type
, arg0
));
7900 /* Transform x * -1.0 into -x. */
7901 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7902 && real_minus_onep (arg1
))
7903 return fold_convert (type
, negate_expr (arg0
));
7905 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7906 if (flag_unsafe_math_optimizations
7907 && TREE_CODE (arg0
) == RDIV_EXPR
7908 && TREE_CODE (arg1
) == REAL_CST
7909 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
7911 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
7914 return fold_build2 (RDIV_EXPR
, type
, tem
,
7915 TREE_OPERAND (arg0
, 1));
7918 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7919 if (operand_equal_p (arg0
, arg1
, 0))
7921 tree tem
= fold_strip_sign_ops (arg0
);
7922 if (tem
!= NULL_TREE
)
7924 tem
= fold_convert (type
, tem
);
7925 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
7929 if (flag_unsafe_math_optimizations
)
7931 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
7932 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
7934 /* Optimizations of root(...)*root(...). */
7935 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
7937 tree rootfn
, arg
, arglist
;
7938 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7939 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7941 /* Optimize sqrt(x)*sqrt(x) as x. */
7942 if (BUILTIN_SQRT_P (fcode0
)
7943 && operand_equal_p (arg00
, arg10
, 0)
7944 && ! HONOR_SNANS (TYPE_MODE (type
)))
7947 /* Optimize root(x)*root(y) as root(x*y). */
7948 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7949 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
7950 arglist
= build_tree_list (NULL_TREE
, arg
);
7951 return build_function_call_expr (rootfn
, arglist
);
7954 /* Optimize expN(x)*expN(y) as expN(x+y). */
7955 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
7957 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7958 tree arg
= fold_build2 (PLUS_EXPR
, type
,
7959 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
7960 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
7961 tree arglist
= build_tree_list (NULL_TREE
, arg
);
7962 return build_function_call_expr (expfn
, arglist
);
          /* Optimizations of pow(...)*pow(...).  */
          if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
              || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
              || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                 1)));
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                 1)));

              /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
              if (operand_equal_p (arg01, arg11, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                  tree arglist = tree_cons (NULL_TREE, arg,
                                            build_tree_list (NULL_TREE,
                                                             arg01));
                  return build_function_call_expr (powfn, arglist);
                }

              /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
              if (operand_equal_p (arg00, arg10, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
                  tree arglist = tree_cons (NULL_TREE, arg00,
                                            build_tree_list (NULL_TREE,
                                                             arg));
                  return build_function_call_expr (powfn, arglist);
                }
            }
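          /* Illustrative example (commentary added here, not in the
             original source): pow (x, y) * pow (z, y) becomes
             pow (x * z, y), and pow (x, y) * pow (x, z) becomes
             pow (x, y + z), halving the number of pow calls.  */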
          /* Optimize tan(x)*cos(x) as sin(x).  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
               || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

              if (sinfn != NULL_TREE)
                return build_function_call_expr (sinfn,
                                                 TREE_OPERAND (arg0, 1));
            }
          /* Optimize x*pow(x,c) as pow(x,c+1).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                 1)));
              if (TREE_CODE (arg11) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg11)
                  && operand_equal_p (arg0, arg10, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg11);
                  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg0, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }

          /* Optimize pow(x,c)*x as pow(x,c+1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                 1)));
              if (TREE_CODE (arg01) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }
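          /* Illustrative example (commentary added here, not in the
             original source): the two symmetric cases above fold both
             x * pow (x, 2.0) and pow (x, 2.0) * x into pow (x, 3.0).  */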
          /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
          if (! optimize_size
              && operand_equal_p (arg0, arg1, 0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree arg = build_real (type, dconst2);
                  tree arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg0, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }
        }
    }
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg0);
        }

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build1 (BIT_NOT_EXPR, type,
                            build2 (BIT_AND_EXPR, type,
                                    TREE_OPERAND (arg0, 0),
                                    TREE_OPERAND (arg1, 0)));

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build1 (BIT_NOT_EXPR, type,
                            build2 (BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0),
                                    TREE_OPERAND (arg1, 0)));

      goto associate;
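      /* Illustrative example (commentary added here, not in the original
         source): for "unsigned char c", the expression (int) c & 0xff is
         folded above to plain (int) c, since the widening conversion
         already leaves the upper bits zero.  */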
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));
      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2 (MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2 (MULT_EXPR, type, arg0, tem);
                }
            }
        }
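      /* Illustrative example (commentary added here, not in the original
         source): x / 2.0 becomes x * 0.5 because 0.5 is an exact
         reciprocal; x / 3.0 is rewritten this way only under
         -funsafe-math-optimizations, since the reciprocal is inexact.  */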
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2 (MULT_EXPR, type,
                            fold_build2 (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2 (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }

      if (TREE_CODE (type) == COMPLEX_TYPE)
        {
          tem = fold_complex_div (type, arg0, arg1, code);
          if (tem)
            return tem;
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg1);
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode))
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
              tree arglist = build_tree_list (NULL_TREE,
                                              fold_convert (type, arg));
              arg1 = build_function_call_expr (expfn, arglist);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode == BUILT_IN_POW
              || fcode == BUILT_IN_POWF
              || fcode == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
              tree neg11 = fold_convert (type, negate_expr (arg11));
              tree arglist = tree_cons (NULL_TREE, arg10,
                                        build_tree_list (NULL_TREE, neg11));
              arg1 = build_function_call_expr (powfn, arglist);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
        }
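      /* Illustrative example (commentary added here, not in the original
         source): the two rewrites above turn x / exp (y) into
         x * exp (-y) and x / pow (y, z) into x * pow (y, -z), replacing a
         division with a cheaper multiplication.  */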
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_function_call_expr (tanfn,
                                                 TREE_OPERAND (arg0, 1));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = TREE_OPERAND (arg0, 1);
                  tmp = build_function_call_expr (tanfn, tmp);
                  return fold_build2 (RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
              if (TREE_CODE (arg01) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }
        }
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
        return fold_convert (type, tem);

      if (TREE_CODE (type) == COMPLEX_TYPE)
        {
          tem = fold_complex_div (type, arg0, arg1, code);
          if (tem)
            return tem;
        }

      return NULL_TREE;
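      /* Illustrative example (commentary added here, not in the original
         source): extract_muldiv can simplify a quotient such as
         (x * 4) / 2 to x * 2, and a FLOOR_DIV_EXPR whose dividend is
         provably a multiple of the divisor is retagged as the cheaper
         EXACT_DIV_EXPR.  */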
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
         BIT_AND_EXPR, i.e. "X % C" into "X & C2".  */
      if (code == TRUNC_MOD_EXPR
          && TYPE_UNSIGNED (type)
          && integer_pow2p (arg1))
        {
          unsigned HOST_WIDE_INT high, low;
          tree mask;
          int l;

          l = tree_log2 (arg1);
          if (l >= HOST_BITS_PER_WIDE_INT)
            {
              high = ((unsigned HOST_WIDE_INT) 1
                      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
              low = -1;
            }
          else
            {
              high = 0;
              low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
            }

          mask = build_int_cst_wide (type, low, high);
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0), mask);
        }
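      /* Illustrative example (commentary added here, not in the original
         source): for unsigned x, x % 8 folds above to x & 7; tree_log2
         returns 3 and the mask built is (1 << 3) - 1.  */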
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_CONSTANT_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !flag_trapv
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !flag_trapv)
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
        return fold_convert (type, tem);

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (NULL_TREE,
                                    GET_MODE_BITSIZE (TYPE_MODE (type)));
          tem = fold_convert (TREE_TYPE (arg1), tem);
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, arg0, tem);
        }
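      /* Illustrative example (commentary added here, not in the original
         source): for a 32-bit type, a left-rotate by 8 is canonicalized
         above into a right-rotate by 24, so later code only has to reason
         about RROTATE_EXPR with a constant count.  */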
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem)
            return fold_build2 (code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem)
            return fold_build2 (code, type, arg0, tem);
        }
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a01,
                                fold_build2 (code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type,
                                fold_build2 (code, type, a00, a10),
                                a01);
        }
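      /* Illustrative example (commentary added here, not in the original
         source): (a || b) && (a || c) is rewritten above as
         a || (b && c), so the shared operand A is tested only once.  */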
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue (arg0);
          return non_lvalue (fold_convert (type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
        return fold_build2 (swap_tree_comparison (code), type, op1, op0);

      /* If this is an equality comparison of the address of a non-weak
         object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && integer_zerop (arg1))
        return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
                                      ? code == EQ_EXPR : code != EQ_EXPR,
                                      type);
      /* If this is a comparison of two exprs that look like an
         ARRAY_REF of the same object, then we can fold this to a
         comparison of the two offsets.  */
      if (TREE_CODE_CLASS (code) == tcc_comparison)
        {
          tree base0, offset0, base1, offset1;

          if (extract_array_ref (arg0, &base0, &offset0)
              && extract_array_ref (arg1, &base1, &offset1)
              && operand_equal_p (base0, base1, 0))
            {
              if (offset0 == NULL_TREE
                  && offset1 == NULL_TREE)
                {
                  offset0 = integer_zero_node;
                  offset1 = integer_zero_node;
                }
              else if (offset0 == NULL_TREE)
                offset0 = build_int_cst (TREE_TYPE (offset1), 0);
              else if (offset1 == NULL_TREE)
                offset1 = build_int_cst (TREE_TYPE (offset0), 0);

              if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
                return fold_build2 (code, type, offset0, offset1);
            }
        }
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((code != EQ_EXPR && code != NE_EXPR)
          && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
                  && !(flag_wrapv || flag_trapv))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (0, type);

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (0, type);

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (1, type);

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (1, type);

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                return constant_boolean_node (1, type);

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                return constant_boolean_node (1, type);

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                return constant_boolean_node (0, type);

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                return constant_boolean_node (0, type);
            }
        }
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree targ0 = strip_float_extensions (arg0);
          tree targ1 = strip_float_extensions (arg1);
          tree newtype = TREE_TYPE (targ0);

          if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
            newtype = TREE_TYPE (targ1);

          /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
          if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
            return fold_build2 (code, type, fold_convert (newtype, targ0),
                                fold_convert (newtype, targ1));

          /* (-a) CMP (-b) -> b CMP a  */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

          if (TREE_CODE (arg1) == REAL_CST)
            {
              REAL_VALUE_TYPE cst;
              cst = TREE_REAL_CST (arg1);

              /* (-a) CMP CST -> a swap(CMP) (-CST)  */
              if (TREE_CODE (arg0) == NEGATE_EXPR)
                return
                  fold_build2 (swap_tree_comparison (code), type,
                               TREE_OPERAND (arg0, 0),
                               build_real (TREE_TYPE (arg1),
                                           REAL_VALUE_NEGATE (cst)));

              /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
              /* a CMP (-0) -> a CMP 0  */
              if (REAL_VALUE_MINUS_ZERO (cst))
                return fold_build2 (code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

              /* x != NaN is always true, other ops are always false.  */
              if (REAL_VALUE_ISNAN (cst)
                  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
                {
                  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
                  return omit_one_operand (type, tem, arg0);
                }

              /* Fold comparisons against infinity.  */
              if (REAL_VALUE_ISINF (cst))
                {
                  tem = fold_inf_compare (code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }

          /* If this is a comparison of a real constant with a PLUS_EXPR
             or a MINUS_EXPR of a real constant, we can convert it into a
             comparison with a revised real constant as long as no overflow
             occurs when unsafe_math_optimizations are enabled.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                          ? MINUS_EXPR : PLUS_EXPR,
                                          arg1, TREE_OPERAND (arg0, 1), 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

          /* Likewise, we can simplify a comparison of a real constant with
             a MINUS_EXPR whose first operand is also a real constant, i.e.
             (c1 - x) < c2 becomes x > c1-c2.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (arg0) == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
              && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                          arg1, 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold_build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

          /* Fold comparisons against built-in math functions.  */
          if (TREE_CODE (arg1) == REAL_CST
              && flag_unsafe_math_optimizations
              && ! flag_errno_math)
            {
              enum built_in_function fcode = builtin_mathfn_code (arg0);

              if (fcode != END_BUILTINS)
                {
                  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
        }
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
          && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
              || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
          /* This optimization is invalid for ordered comparisons
             if CONST+INCR overflows or if foo+incr might overflow.
             This optimization is invalid for floating point due to rounding.
             For pointer types we assume overflow doesn't happen.  */
          && (POINTER_TYPE_P (TREE_TYPE (arg0))
              || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && (code == EQ_EXPR || code == NE_EXPR))))
        {
          tree varop, newconst;

          if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
            {
              newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
                                      arg1, TREE_OPERAND (arg0, 1));
              varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
                              TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
            }
          else
            {
              newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
                                      arg1, TREE_OPERAND (arg0, 1));
              varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
                              TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
            }

          /* If VAROP is a reference to a bitfield, we must mask
             the constant by the width of the field.  */
          if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
              && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
              && host_integerp (DECL_SIZE (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)), 1))
            {
              tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
              HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
              tree folded_compare, shift;

              /* First check whether the comparison would come out
                 always the same.  If we don't do that we would
                 change the meaning with the masking.  */
              folded_compare = fold_build2 (code, type,
                                            TREE_OPERAND (varop, 0), arg1);
              if (integer_zerop (folded_compare)
                  || integer_onep (folded_compare))
                return omit_one_operand (type, folded_compare, varop);

              shift = build_int_cst (NULL_TREE,
                                     TYPE_PRECISION (TREE_TYPE (varop)) - size);
              shift = fold_convert (TREE_TYPE (varop), shift);
              newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
                                      newconst, shift);
              newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
                                      newconst, shift);
            }

          return fold_build2 (code, type, varop, newconst);
        }
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold_build2 (GT_EXPR, type, arg0, arg1);

            case LT_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold_build2 (LE_EXPR, type, arg0, arg1);

            default:
              break;
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);

                case LE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2 (NE_EXPR, type, arg0, arg1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);

                case GE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2 (NE_EXPR, type, arg0, arg1);

                default:
                  break;
                }
            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
                default:
                  break;
                }

            else if (!in_gimple_form
                     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
                    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
                    return fold
                      (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                               type, fold_convert (st0, arg0),
                               fold_convert (st1, integer_zero_node)));
                  }
              }
          }
      }
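      /* Illustrative example (commentary added here, not in the original
         source): for "unsigned x" in a 32-bit type, x <= 0x7fffffff is
         folded by the code above to (int) x >= 0, which most targets can
         test with a single sign-bit check.  */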
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && (TREE_CODE (arg0) == NOP_EXPR
                   || TREE_CODE (arg0) == CONVERT_EXPR))
        {
          /* If we are widening one operand of an integer comparison,
             see if the other operand is similarly being widened.  Perhaps we
             can do the comparison in the narrower type.  */
          tem = fold_widened_comparison (code, type, arg0, arg1);
          if (tem)
            return tem;

          /* Or if we are changing signedness.  */
          tem = fold_sign_changed_comparison (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tem = optimize_minmax_comparison (code, type, op0, op1);
          if (tem)
            return tem;

          return NULL_TREE;
        }

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
                            build2 (GE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), tem),
                            build2 (LE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      else if (code == GE_EXPR
               && tree_expr_nonnegative_p (arg0)
               && (integer_zerop (arg1)
                   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                       && real_zerop (arg1))))
        return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      else if (code == LT_EXPR
               && tree_expr_nonnegative_p (arg0)
               && (integer_zerop (arg1) || real_zerop (arg1)))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == ABS_EXPR
               && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            return
              fold_build2 (code, type,
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
                                           arg01, TREE_OPERAND (arg00, 1)),
                                   fold_convert (TREE_TYPE (arg0),
                                                 integer_one_node)),
                           arg1);
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold_build2 (code, type,
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
                                           arg00, TREE_OPERAND (arg01, 1)),
                                   fold_convert (TREE_TYPE (arg0),
                                                 integer_one_node)),
                           arg1);
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
          tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 0)),
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 1)));

          return fold_build2 (code, type, newmod,
                              fold_convert (newtype, arg1));
        }

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert (TREE_TYPE (arg0),
                                                integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
         2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1 (BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                               TREE_OPERAND (arg1, 1)),
                       fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TYPE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                  fold_convert (TREE_TYPE (arg0),
                                build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                        TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                      1))),
                  fold_convert (TREE_TYPE (arg0), integer_zero_node));
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              break;

            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              return fold_build2 (EQ_EXPR, type, arg0, arg1);

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer
                 or we don't honor IEEE floating point NaNs.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              break;
            }
        }
      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
              tree equal_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
              tree low_result
                = fold_build2 (code, type,
                               eval_subst (arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  if (save_p)
                    return save_expr (build2 (code, type, cval1, cval2));
                  return fold_build2 (code, type, cval1, cval2);
                }
            }
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && lang_hooks.can_use_bit_fields_p ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
          imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
          real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
          imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);

          return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold_build2 (code, type, real0, real1),
                              fold_build2 (code, type, imag0, imag1));
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
             strlen(ptr) == 0   =>  *ptr == 0
             strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold_build2 (code, type,
                                build1 (INDIRECT_REF, char_type_node,
                                        TREE_VALUE (arglist)),
                                fold_convert (char_type_node,
                                              integer_zero_node));
        }
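      /* Illustrative example (commentary added here, not in the original
         source): strlen (p) == 0 folds to *p == 0, replacing a library
         call with a single byte load and compare.  */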
9697 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9698 into a single range test. */
9699 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9700 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9701 && TREE_CODE (arg1
) == INTEGER_CST
9702 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9703 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9704 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9705 && !TREE_OVERFLOW (arg1
))
9707 t1
= fold_div_compare (code
, type
, arg0
, arg1
);
9708 if (t1
!= NULL_TREE
)
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && !TREE_SIDE_EFFECTS (arg0)
	  && integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	return constant_boolean_node (code == NE_EXPR, type);

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? NULL_TREE : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      return NULL_TREE;
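      /* Illustrative effect of the narrowing above (editorial addition):
	 with float f1, f2, the test (double) f1 < (double) f2 is exact in
	 float already, so it is rewritten as f1 < f2 without the widening
	 conversions.  */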
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be a null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.  */
	  if (! VOID_TYPE_P (TREE_TYPE (tem))
	      || VOID_TYPE_P (type))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build3 (code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	return fold_convert (type, fold_build2 (BIT_AND_EXPR,
						TREE_TYPE (tem), tem, arg1));
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);

      return NULL_TREE;
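      /* Recap of the four truth-value rewrites above (editorial addition):

	   a ? b : 0   =>   a && b
	   a ? b : 1   =>   !a || b
	   a ? 0 : b   =>   !a && b
	   a ? 1 : b   =>   a || b

	 where a and b are known truth values and, for the '!' forms, a is
	 easily inverted.  */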
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	{
	  tree fndecl = TREE_OPERAND (op0, 0);
	  tree arglist = op1;
	  tree tmp = fold_builtin (fndecl, arglist, false);
	  if (tmp)
	    return tmp;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case ASSERT_EXPR:
      {
	/* Given ASSERT_EXPR <Y, COND>, return Y if COND can be folded
	   to boolean_true_node.  If COND folds to boolean_false_node,
	   return ASSERT_EXPR <Y, 0>.  Otherwise, return the original
	   expression.  */
	tree c = fold (ASSERT_EXPR_COND (t));
	if (c == boolean_true_node)
	  return ASSERT_EXPR_VAR (t);
	else if (c == boolean_false_node)
	  return build (ASSERT_EXPR, TREE_TYPE (t), ASSERT_EXPR_VAR (t), c);
	else
	  return t;
      }

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
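/* Sketch of the failure mode this wrapper catches (editorial addition):
   if a transformation inside fold_1 mutated a node reachable from EXPR
   in place instead of building a fresh node, the before and after
   digests would disagree and fold_check_failed would report an internal
   compiler error naming the offending fold.  */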
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1 (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  if (tem)
    return tem;
  return build1 (code, type, op0);
}

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  if (tem)
    return tem;
  return build2 (code, type, op0, op1);
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem = fold_ternary (code, type, op0, op1, op2);
  if (tem)
    return tem;
  return build3 (code, type, op0, op1, op2);
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_rounding_math = flag_rounding_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_rounding_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_rounding_math = saved_rounding_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
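/* Worked example for multiple_of_p (editorial addition): with
   TOP = (j << 3) + 16 and BOTTOM = 8, the PLUS_EXPR case requires both
   operands to be multiples of 8; j << 3 is handled by the LSHIFT_EXPR
   case as j * 8, and 16 % 8 == 0 is settled by the INTEGER_CST case,
   so the routine returns 1.  */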
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_LCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
	    }
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
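/* Numeric check of the zero_extend rules above (editorial addition):
   for unsigned char x, y widened to a 32-bit int, x + y needs at most
   MAX (8, 8) + 1 == 9 bits and x * y at most 8 + 8 == 16 bits, both
   strictly less than 32, so neither can wrap into the sign bit.  */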
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE and OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL_TREE;
}
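/* Illustrative fold (editorial addition, not from the original source):
   given

     static const char s[] = "abc";

   a read such as s[1] or *(s + 1) passes the checks above and folds to
   the character constant 'b'.  */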
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
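/* Worked reduction (editorial addition): folding 3 > 5 first swaps the
   operands and does 5 < 3, INT_CST_LT yields 0, no inversion is
   pending, so the result is constant false.  */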
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either doesn't have side effects set we don't need
     to wrap the expression in a cleanup point expression.  Note we don't check
     the left hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value T, return a simplified version of an indirection
   through T, or NULL_TREE if no simplification is possible.  */

static tree
fold_indirect_ref_1 (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
	return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
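/* Arithmetic of the power-of-two path above (editorial addition): for
   divisor == 8, round_up computes (value + 7) & -8; e.g. 13 becomes
   (13 + 7) & ~7 == 16, while 16 stays 16.  */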
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
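/* Likewise for round_down (editorial addition): value & -8 clears the
   low three bits, so 13 rounds down to 8 and 16 stays 16.  */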
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      if (TREE_CODE (core) == INDIRECT_REF)
	core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!host_integerp (tdiff, 0))
	return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
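/* Illustrative use of ptr_difference_const (editorial addition): for
   int a[10];, the addresses &a[7] and &a[2] share the core a, carry
   constant byte offsets, and *diff is set to 5 * sizeof (int).  */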
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));