/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
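
/* Editorial worked example (not part of the original source): the
   encoding assigns one bit to each primitive outcome, LT = 1, EQ = 2,
   GT = 4 and UNORD = 8, so composite operators are unions of outcome
   bits: LE = LT|EQ = 3, GE = GT|EQ = 6, NE = LT|GT|UNORD = 13.
   Combining two comparisons of the same operands then reduces to
   bitwise AND/OR of their codes; e.g. (a <= b) && (a >= b) becomes
   3 & 6 = 2 = COMPCODE_EQ, i.e. a == b, which is exactly what
   combine_comparisons below computes.  */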
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
						 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
				   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
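
/* Editorial worked example (assuming a 32-bit HOST_WIDE_INT):
   a = 0x7fffffff, b = 1, sum = 0x80000000.  Here ~(a ^ b) =
   0x80000001 has the sign bit set (a and b agree in sign), and
   a ^ sum = 0xffffffff has the sign bit set (a and sum disagree),
   so the AND is negative and the macro reports overflow.  */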
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
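
/* Editorial worked example (assuming a 32-bit HOST_WIDE_INT, so
   BASE = 0x10000): for x = 0x12345678, LOWPART (x) = 0x5678 and
   HIGHPART (x) = 0x1234, and indeed
   x == LOWPART (x) + HIGHPART (x) * BASE.  */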
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
	CONST_OVERFLOWED is nonzero,
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
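
/* Editorial illustration (not from the original source): forcing the
   value 0x1234 to fit a signed 8-bit type first masks it to 0x34;
   since bit 7 is then clear, no sign extension happens and a fresh
   INTEGER_CST holding 0x34 is returned.  For the value 0xFF in the
   same type, bit 7 is set, so LOW is sign extended to -1.  */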
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
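
/* Editorial note: the carry out of the low word is detected by the
   unsigned comparison l < l1; the low-word sum wraps modulo 2**N
   exactly when a carry occurred.  Example with 32-bit words:
   l1 = 0xffffffff, l2 = 2 gives l = 1 < l1, so 1 is added into the
   high-word sum.  */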
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
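
/* Editorial note: the expression
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 above computes
   l1 >> (HOST_BITS_PER_WIDE_INT - count) without ever shifting by the
   full word size, which C leaves undefined; when count == 0 the two
   half-shifts together discard all of L1's bits, which is exactly
   what the high word should receive in that case.  */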
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      break;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
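
/* Editorial worked example of the rounding modes: for 7 / 2 the trial
   quotient is 3 with remainder 1; FLOOR keeps 3, CEIL bumps to 4, and
   ROUND bumps to 4 since 2 * |rem| >= |den|.  For -7 / 2 the trial
   quotient is -3 with remainder -1; FLOOR yields -4, CEIL keeps -3,
   and ROUND yields -4 (ties and above round away from zero).  */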
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
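
/* Editorial example: for an 8-bit signed type this rejects exactly
   -128, whose low bits equal 1 << 7 (the lone value with no positive
   counterpart); every other value, e.g. -127 (0x81), differs from
   that pattern and can be negated without overflow.  */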
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 0)));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 1)));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (t),
					   TREE_OPERAND (t, 1),
					   TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 0),
					       negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       negate_expr (tem),
					       TREE_OPERAND (t, 1))));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
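
/* Editorial example of the RSHIFT_EXPR case above, for 32-bit int:
   -((int) x >> 31) is either 0 or 1, and so is (unsigned) x >> 31,
   so the negation is folded away by redoing the shift in the
   corresponding unsigned type.  */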
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
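
/* Editorial example: splitting IN = a + 4 with CODE == PLUS_EXPR
   stores 4 in *LITP and returns a; splitting IN = a - 4 instead
   stores 4 in *MINUS_LITP.  A TREE_CONSTANT but non-literal operand,
   such as the address of a static variable, would go to *CONP.  */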
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
		       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
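
/* Illustrative call (editorial sketch, not in the original file):

     tree five = int_const_binop (PLUS_EXPR,
				  build_int_cst (integer_type_node, 2),
				  build_int_cst (integer_type_node, 3),
				  0);

   FIVE is an INTEGER_CST of value 5; with NOTRUNC == 0 the result is
   passed through force_fit_type, so it is truncated to the precision
   of int and carries the proper overflow flags.  */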
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return 0;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
1692 build_zero_vector (tree type
)
1697 elem
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1698 units
= TYPE_VECTOR_SUBPARTS (type
);
1701 for (i
= 0; i
< units
; i
++)
1702 list
= tree_cons (NULL_TREE
, elem
, list
);
1703 return build_vector (type
, list
);
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1));

	  t = force_fit_type (t,
			      /* Don't set the overflow when
				 converting a pointer  */
			      !POINTER_TYPE_P (TREE_TYPE (arg1)),
			      (TREE_INT_CST_HIGH (arg1) < 0
			       && (TYPE_UNSIGNED (type)
				   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			      | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));

	  return t;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* The following code implements the floating point to integer
	     conversion rules required by the Java Language Specification,
	     that IEEE NaNs are mapped to zero and values that overflow
	     the target precision saturate, i.e. values greater than
	     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
	     are mapped to INT_MIN.  These semantics are allowed by the
	     C and C++ standards that simply state that the behavior of
	     FP-to-integer conversion is unspecified upon overflow.  */

	  HOST_WIDE_INT high, low;
	  REAL_VALUE_TYPE r;
	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

	  switch (code)
	    {
	    case FIX_TRUNC_EXPR:
	      real_trunc (&r, VOIDmode, &x);
	      break;

	    case FIX_CEIL_EXPR:
	      real_ceil (&r, VOIDmode, &x);
	      break;

	    case FIX_FLOOR_EXPR:
	      real_floor (&r, VOIDmode, &x);
	      break;

	    case FIX_ROUND_EXPR:
	      real_round (&r, VOIDmode, &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* If R is NaN, return zero and show we have an overflow.  */
	  if (REAL_VALUE_ISNAN (r))
	    {
	      overflow = 1;
	      high = 0;
	      low = 0;
	    }

	  /* See if R is less than the lower bound or greater than the
	     upper bound.  */

	  if (! overflow)
	    {
	      tree lt = TYPE_MIN_VALUE (type);
	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
	      if (REAL_VALUES_LESS (r, l))
		{
		  overflow = 1;
		  high = TREE_INT_CST_HIGH (lt);
		  low = TREE_INT_CST_LOW (lt);
		}
	    }

	  if (! overflow)
	    {
	      tree ut = TYPE_MAX_VALUE (type);
	      if (ut)
		{
		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
		  if (REAL_VALUES_LESS (u, r))
		    {
		      overflow = 1;
		      high = TREE_INT_CST_HIGH (ut);
		      low = TREE_INT_CST_LOW (ut);
		    }
		}
	    }

	  if (! overflow)
	    REAL_VALUE_TO_INT (&low, &high, r);

	  t = build_int_cst_wide (type, low, high);

	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  return NULL_TREE;
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold (build1 (NOP_EXPR, type, arg));

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold (build1 (FLOAT_EXPR, type, arg));

	case REAL_TYPE:
	  return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			       type, arg));

	case COMPLEX_TYPE:
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	      }

	    arg = save_expr (arg);
	    rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	    ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold (build1 (NOP_EXPR, type, arg));

    case VOID_TYPE:
      return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));

    default:
      gcc_unreachable ();
    }
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return x;
  }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
			 truth_type, ll_arg, lr_arg));
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
2332 operand_equal_p (tree arg0
, tree arg1
, unsigned int flags
)
2334 /* If either is ERROR_MARK, they aren't equal. */
2335 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
)
2338 /* If both types don't have the same signedness, then we can't consider
2339 them equal. We must check this before the STRIP_NOPS calls
2340 because they may change the signedness of the arguments. */
2341 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2347 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2348 /* This is needed for conversions and for COMPONENT_REF.
2349 Might as well play it safe and always test this. */
2350 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2351 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2352 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2355 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2356 We don't care about side effects in that case because the SAVE_EXPR
2357 takes care of that for us. In all other cases, two expressions are
2358 equal if they have no side effects. If we have two identical
2359 expressions with side effects that should be treated the same due
2360 to the only side effects being identical SAVE_EXPR's, that will
2361 be detected in the recursive calls below. */
2362 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2363 && (TREE_CODE (arg0
) == SAVE_EXPR
2364 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2367 /* Next handle constant cases, those for which we can return 1 even
2368 if ONLY_CONST is set. */
2369 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2370 switch (TREE_CODE (arg0
))
2373 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2374 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2375 && tree_int_cst_equal (arg0
, arg1
));
2378 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2379 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2380 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2381 TREE_REAL_CST (arg1
)));
2387 if (TREE_CONSTANT_OVERFLOW (arg0
)
2388 || TREE_CONSTANT_OVERFLOW (arg1
))
2391 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2392 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2395 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2398 v1
= TREE_CHAIN (v1
);
2399 v2
= TREE_CHAIN (v2
);
2406 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2408 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2412 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2413 && ! memcmp (TREE_STRING_POINTER (arg0
),
2414 TREE_STRING_POINTER (arg1
),
2415 TREE_STRING_LENGTH (arg0
)));
2418 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2424 if (flags
& OEP_ONLY_CONST
)
2427 /* Define macros to test an operand from arg0 and arg1 for equality and a
2428 variant that allows null and views null as being different from any
2429 non-null value. In the latter case, if either is null, the both
2430 must be; otherwise, do the normal comparison. */
2431 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2432 TREE_OPERAND (arg1, N), flags)
2434 #define OP_SAME_WITH_NULL(N) \
2435 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2436 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2438 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2441 /* Two conversions are equal only if signedness and modes match. */
2442 switch (TREE_CODE (arg0
))
2447 case FIX_TRUNC_EXPR
:
2448 case FIX_FLOOR_EXPR
:
2449 case FIX_ROUND_EXPR
:
2450 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2461 case tcc_comparison
:
2463 if (OP_SAME (0) && OP_SAME (1))
2466 /* For commutative ops, allow the other order. */
2467 return (commutative_tree_code (TREE_CODE (arg0
))
2468 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2469 TREE_OPERAND (arg1
, 1), flags
)
2470 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2471 TREE_OPERAND (arg1
, 0), flags
));
2474 /* If either of the pointer (or reference) expressions we are
2475 dereferencing contain a side effect, these cannot be equal. */
2476 if (TREE_SIDE_EFFECTS (arg0
)
2477 || TREE_SIDE_EFFECTS (arg1
))
2480 switch (TREE_CODE (arg0
))
2483 case ALIGN_INDIRECT_REF
:
2484 case MISALIGNED_INDIRECT_REF
:
2490 case ARRAY_RANGE_REF
:
2491 /* Operands 2 and 3 may be null. */
2494 && OP_SAME_WITH_NULL (2)
2495 && OP_SAME_WITH_NULL (3));
2498 /* Handle operand 2 the same as for ARRAY_REF. */
2499 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2502 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2508 case tcc_expression
:
2509 switch (TREE_CODE (arg0
))
2512 case TRUTH_NOT_EXPR
:
2515 case TRUTH_ANDIF_EXPR
:
2516 case TRUTH_ORIF_EXPR
:
2517 return OP_SAME (0) && OP_SAME (1);
2519 case TRUTH_AND_EXPR
:
2521 case TRUTH_XOR_EXPR
:
2522 if (OP_SAME (0) && OP_SAME (1))
2525 /* Otherwise take into account this is a commutative operation. */
2526 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2527 TREE_OPERAND (arg1
, 1), flags
)
2528 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2529 TREE_OPERAND (arg1
, 0), flags
));
2532 /* If the CALL_EXPRs call different functions, then they
2533 clearly can not be equal. */
2538 unsigned int cef
= call_expr_flags (arg0
);
2539 if (flags
& OEP_PURE_SAME
)
2540 cef
&= ECF_CONST
| ECF_PURE
;
2547 /* Now see if all the arguments are the same. operand_equal_p
2548 does not handle TREE_LIST, so we walk the operands here
2549 feeding them to operand_equal_p. */
2550 arg0
= TREE_OPERAND (arg0
, 1);
2551 arg1
= TREE_OPERAND (arg1
, 1);
2552 while (arg0
&& arg1
)
2554 if (! operand_equal_p (TREE_VALUE (arg0
), TREE_VALUE (arg1
),
2558 arg0
= TREE_CHAIN (arg0
);
2559 arg1
= TREE_CHAIN (arg1
);
2562 /* If we get here and both argument lists are exhausted
2563 then the CALL_EXPRs are equal. */
2564 return ! (arg0
|| arg1
);
2570 case tcc_declaration
:
2571 /* Consider __builtin_sqrt equal to sqrt. */
2572 return (TREE_CODE (arg0
) == FUNCTION_DECL
2573 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2574 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2575 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2582 #undef OP_SAME_WITH_NULL
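
/* For instance, with int variables a and b free of side effects,
   a + b and b + a are considered equal because PLUS_EXPR is
   commutative, while expressions whose types have different machine
   modes never compare equal.  With OEP_ONLY_CONST set, only the
   constant cases above can succeed, so even two identical
   non-constant trees yield 0.  */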
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
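
/* For example, applied to (a < b) && (a == b) this returns 1 with
   *CVAL1 == a and *CVAL2 == b: every comparison in the tree mentions
   only those two values.  Applied to (a < b) && (a == c) it returns 0
   as soon as the third value c is seen.  */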
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case tcc_binary:
      return fold (build2 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1),
                           eval_subst (TREE_OPERAND (arg, 1),
                                       old0, new0, old1, new1)));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build3 (code, type,
                               eval_subst (TREE_OPERAND (arg, 0),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 1),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 2),
                                           old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build2 (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
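
/* For example, when fold simplifies f () * 0, the call cannot simply
   be dropped: omit_one_operand (type, integer_zero_node, call)
   produces the COMPOUND_EXPR (f (), 0), which still evaluates the
   call.  If the omitted operand has no side effects, the result is
   just RESULT converted to TYPE.  */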
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type,
                           build_int_cst (NULL_TREE, integer_zerop (arg)));

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build2 (TREE_CODE (arg0), type, common,
                       fold (build2 (code, type, left, right))));
}
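
/* The same routine also handles the dual case, converting
	(A & B) | (A & C) into A & (B | C)
   and, because the four operand_equal_p tests above try both operand
   orders, commuted forms such as (B | A) & (A | C) as well.  */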
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result = build3 (BIT_FIELD_REF, type, inner,
                        size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
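
/* As a concrete illustration of the mask construction, assume a 32-bit
   word, little-endian bit numbering, and an 8-bit field at bit position
   8, so nbitsize == 32: the mask starts as all ones, the left shift by
   nbitsize - lbitsize == 24 gives 0xff000000, and the logical right
   shift by nbitsize - lbitsize - lbitpos == 16 leaves 0x0000ff00,
   exactly the bits of the field within the containing word.  */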
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
                         fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
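
/* To see why the example works, note that 2 <= X && X <= 5 is the same
   as 0 <= X - 2 && X - 2 <= 3, and in unsigned arithmetic the lower
   bound comes for free: if X < 2, then (unsigned) (X - 2) wraps around
   to a very large value and the single comparison <= 3 fails.  */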
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                          arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
                                              fold_convert (arg0_type,
                                                            high_positive),
                                              fold_convert (arg0_type,
                                                            integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
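
/* For example, applied to EXP = (X + 1 <= 5) for signed X, the LE_EXPR
   case records + [-, 5] for X + 1, the PLUS_EXPR case shifts the
   bounds to + [-, 4], and the routine returns X with *PIN_P == 1,
   *PLOW == 0 (no lower bound) and *PHIGH == 4; i.e. the test is
   X <= 4.  */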
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build2 (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build2 (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build2 (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build2 (GT_EXPR, type, exp,
                               fold_convert (etype, integer_zero_node)));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build2 (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
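
/* Continuing the earlier example, build_range_check (type, X, 1, 2, 5)
   computes value = 5 - 2 = 3 and recurses on X - 2 against [0, 3];
   since the new low bound is zero, X - 2 is converted to the unsigned
   variant of its type and the result is (unsigned) (X - 2) <= 3.  */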
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
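
/* For example, combining + [2, -] (X >= 2) with + [-, 5] (X <= 5), the
   swap above makes range 0 the one with no lower bound; the ranges
   overlap and neither is a subset, so the "in0_p && in1_p" case yields
   + [2, 5].  Combining + [2, 3] with + [6, 9] the same case detects
   no_overlap and produces the always-false range.  */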
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && TREE_CODE (arg2) == NEGATE_EXPR
      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert (arg1_type, arg1);
        return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
        return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
        return negate_expr (fold_convert (type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
        return fold_convert (type, integer_zero_node);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
                    : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
                    : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert (type, arg01);
        return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
                                                    type, arg1, arg2)));
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
                                                    type, arg1, arg2)));
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
                                                    type, arg1, arg2)));
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
                                                    type, arg1, arg2)));
        break;

      case NE_EXPR:
        break;

      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		       TREE_OPERAND (exp, 1));

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
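/* Illustrative example: a pair of tests such as

	ch >= '0' && ch <= '9'

   is recognized by make_range/merge_ranges as a single range and
   rewritten by build_range_check into (roughly; the exact tree shape
   may differ) a one-comparison form

	(unsigned char) (ch - '0') <= 9

   so only one comparison and one branch remain.  */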
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
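/* Worked example (illustrative): in a 32-bit mode with p == 8,
   unsignedp == 0 and mask == 0, the 8-bit value -1 arriving as
   c == 0xff gives

	temp = (0xff >> 7) & 1			-> 1
	temp = (1 << 31) >> (32 - 8 - 1)	-> 0xffffff00 (arithmetic)
	result = 0xff ^ 0xffffff00		-> 0xffffffff

   so the extra bits come out nonzero.  Starting instead from the
   properly sign-extended constant 0xffffffff, the same steps give
   0xffffffff ^ 0xffffff00 == 0xff, whose extra bits are zero.  */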
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      tree result;
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning ("%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
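/* As an illustration, given

	struct s { unsigned a : 8; unsigned b : 8; } *p;
	... p->a == 2 && p->b == 4 ...

   the two field comparisons can be merged (roughly, on a little-endian
   target) into a single 16-bit load and compare of the form

	(word & 0xffff) == 0x0402

   where "word" stands for the wider bit-field reference built by
   make_bit_field_ref spanning both fields.  */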
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build2 (TRUTH_ORIF_EXPR, type,
		      optimize_minmax_comparison
		      (build2 (EQ_EXPR, type, arg0, comp_const)),
		      optimize_minmax_comparison
		      (build2 (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build2 (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build2 (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build2 (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build2 (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build2 (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build2 (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
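/* For example, MIN (X, 4) >= 5 first goes through the GE_EXPR case as
   "MIN (X, 4) == 5 || MIN (X, 4) > 5"; both halves then fold to false
   (the constant 4 is less than 5 in each sub-case), so the whole
   comparison folds to false.  */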
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			       fold_convert (ctype, t2)));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			     fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build2 (tcode, ctype,
			     fold (build2 (code, ctype,
					   fold_convert (ctype, op0),
					   fold_convert (ctype, c))),
			     op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			     fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
			     fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
				 fold_convert (ctype,
					       const_binop (TRUNC_DIV_EXPR,
							    op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build2 (code, ctype, fold_convert (ctype, op0),
				 fold_convert (ctype,
					       const_binop (TRUNC_DIV_EXPR,
							    c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
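/* Example of the canonicalization described above: for t = (X + 7) * 4
   with code == MULT_EXPR and c == 4, the PLUS_EXPR case applies the
   distributive law and returns X * 4 + 28, which may feed a
   multiply-accumulate insn or an addressing mode.  Likewise,
   ((X * 8) + (Y * 16)) / 4 with code == TRUNC_DIV_EXPR becomes
   X * 2 + Y * 4, provided overflow either cannot occur or is undefined
   for the type in the source language.  */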
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (value ? integer_one_node
					     : integer_zero_node);
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (lhs == 0)
    lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
			     : build2 (code, type, arg, true_value));
  if (rhs == 0)
    rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
			     : build2 (code, type, arg, false_value));

  test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
  return fold_convert (type, test);
}
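/* For instance, folding 1 + (x < y) with cond_first_p == 0 produces
   (x < y) ? (1 + 1) : (1 + 0), i.e. (x < y) ? 2 : 1, since the
   comparison is treated as a boolean whose arms are 1 and 0.  */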
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
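/* Concretely: when signed zeros must be honored, X + 0.0 can never be
   folded to X, because (-0.0) + 0.0 evaluates to +0.0.  X - 0.0 is safe
   unless sign-dependent rounding applies: under rounding towards
   -infinity, (+0.0) - 0.0 evaluates to -0.0 rather than +0.0.  */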
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build2 (GE_EXPR, type, arg,
			       build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build2 (EQ_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build2 (code, type, arg,
			       build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build2 (NE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build2 (GE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (NE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build2 (code, type, arg,
				 build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (code, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build2 (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			     arg0, build_real (TREE_TYPE (arg0), max)));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
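/* Example: for a double x, the comparison x < __builtin_inf () becomes
   x <= DBL_MAX, and x >= __builtin_inf () becomes x > DBL_MAX, so no
   run-time infinity constant is needed.  */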
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
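/* Example: for unsigned x, x / 4 == 2 holds exactly when x is in
   [8, 11], so the comparison becomes a range check (in effect
   x - 8 <= 3, computed in the unsigned type), while x / 4 > 2 becomes
   simply x > 11.  */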
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			       result_type, fold_convert (stype, arg00),
			       fold_convert (stype, integer_zero_node)));
	}

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      if (code == EQ_EXPR)
	inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
			      inner, integer_one_node));

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
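/* Example: (x & 8) != 0 becomes (in effect) ((unsigned) x >> 3) & 1,
   and (x & 8) == 0 becomes (((unsigned) x >> 3) ^ 1) & 1.  When the
   mask selects the sign bit, (x & 0x80000000) != 0 for a 32-bit int x
   instead becomes x < 0.  */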
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && TREE_CODE (shorter_type) == INTEGER_TYPE
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold (build (code, type, arg0_unw,
			fold_convert (shorter_type, arg1_unw)));

  if (TREE_CODE (arg1_unw) != INTEGER_CST)
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
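/* Example: if c has type signed char, then (int) c == 1000 can never
   hold, because 1000 lies outside [-128, 127]; the comparison folds to
   false.  By contrast (int) c == 100 is rewritten as c == 100 in the
   narrower type.  */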
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(TREE_CODE (arg1) == NOP_EXPR
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold (build (code, type, arg0_inner, arg1));
}
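/* Illustrative example (added commentary, not part of the original
   source): for "int x", the comparison "(unsigned) x == 5U" has an
   outer NOP_EXPR that changes only the signedness; the code above
   drops the conversion, rebuilds the constant in the inner type,
   and folds the tree to "x == 5".  */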
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  TYPE is the type of the expression.  ADDR is the address.
   MULT is the multiplicative expression.  If the function succeeds, the new
   address expression is returned.  Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
{
  tree s, delta, step;
  tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize the constant step into S and the other factor into DELTA.  */
  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      s = arg0;
      delta = arg1;
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      s = arg1;
      delta = arg0;
    }
  else
    return NULL_TREE;

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  step = array_ref_element_size (ref);

	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  itype = TREE_TYPE (step);

	  /* If the type sizes do not match, we might run into problems
	     when one of them would overflow.  */
	  if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
	    continue;

	  if (!operand_equal_p (step, fold_convert (itype, s), 0))
	    continue;

	  delta = fold_convert (itype, delta);
	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
					TREE_OPERAND (pos, 1),
					delta));

  return build1 (ADDR_EXPR, type, ret);
}
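/* Illustrative example (added commentary, not part of the original
   source): for "int a[10]", the loop optimizer may produce
   "&a[i] + 4 * d" on a target where sizeof (int) == 4; the constant
   factor matches the step of the ARRAY_REF, so the address is
   rewritten as "&a[i + d]".  */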
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold (build2 (MINUS_EXPR, typea, a1, a));
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold (build2 (GE_EXPR, type, a, y));
}
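/* Illustrative example (added commentary, not part of the original
   source): with BOUND being "a < x" and INEQ being "a + 1 > y",
   the difference a1 - a folds to the literal 1, so the sharp
   inequality is relaxed to "a >= y"; this is safe because the
   accompanying "a < x" guarantees that a + 1 does not wrap.  */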
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  const tree type = TREE_TYPE (expr);
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
	STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = TREE_CODE_LENGTH (code);
      int i;
      for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */

	  /* Strip any conversions that don't change the mode.  This is
	     safe for every expression, except for a comparison expression
	     because its signedness is derived from its operands.  So, in
	     the latter case, only strip conversions that don't change the
	     signedness.

	     Note that this is done as an internal manipulation within the
	     constant folder, in order to find the simplest representation
	     of the arguments so that their form can be studied.  In any
	     case, the appropriate type conversions should be put back in
	     the tree that will get out of the constant folder.  */
	  if (kind == tcc_comparison)
	    STRIP_SIGN_NOPS (op);
	  else
	    STRIP_NOPS (op);

	  if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;

	  if (TREE_CODE (subop) != INTEGER_CST
	      && TREE_CODE (subop) != REAL_CST)
	    /* Note that TREE_CONSTANT isn't enough:
	       static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build2 (code, type, TREE_OPERAND (t, 1),
			 TREE_OPERAND (t, 0)));
  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a comparison,
     a BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			  : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			  : TRUTH_XOR_EXPR,
			  type, fold_convert (boolean_type_node, arg0),
			  fold_convert (boolean_type_node, arg1)));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return tem;
    }
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold (build1 (code, type, arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold (build1 (code, type, arg02));
	  tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			      arg01, arg02));

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1 (code, type,
			  build3 (COND_EXPR,
				  TREE_TYPE (TREE_OPERAND
					     (TREE_OPERAND (tem, 1), 0)),
				  TREE_OPERAND (tem, 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold (build3 (COND_EXPR, type, arg0,
				 fold (build1 (code, type,
					       integer_one_node)),
				 fold (build1 (code, type,
					       integer_zero_node))));
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_comparison
	   && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		   fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == tcc_comparison
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		   fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == tcc_binary
	   || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold (build2 (code, type, TREE_OPERAND (arg0, 1),
				     arg1)));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold (build2 (code, type,
				     arg0, TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
	return TREE_OPERAND (t, 0);
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
	}
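      /* Illustrative example (added commentary, not part of the original
	 source): for "unsigned short us", the expression
	 "(long long) (int) us" is a sign-extension of a zero-extended
	 value (16 < 32 < 64 bits, inner type unsigned, middle type
	 signed), so the pair above collapses to the single conversion
	 "(long long) us".  */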
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  tem = copy_node (t);
	  TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    return fold (build2 (BIT_AND_EXPR, type,
				 fold_convert (type, and0),
				 fold_convert (type, and1)));
	}
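      /* Illustrative example (added commentary, not part of the original
	 source): if the front end produces "(int) (sc & 0x70)" with
	 "signed char sc", the mask 0x70 leaves the sign bit of the
	 inner type clear, so the conversion is pushed inside and the
	 tree becomes "(int) sc & 0x70".  */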
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && BINARY_CLASS_P (arg0)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = TREE_TYPE (t);
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : t;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return t;

    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
	{
	  tem = copy_node (t);
	  TREE_CONSTANT (tem) = wins;
	  TREE_INVARIANT (tem) = wins;
	  return tem;
	}
      return t;
    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return fold_convert (type, negate_expr (arg0));
      return t;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold (build1 (ABS_EXPR,
						     TREE_TYPE (targ0),
						     targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;
      return t;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build2 (COMPLEX_EXPR, type,
		       TREE_OPERAND (arg0, 0),
		       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
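	  /* Illustrative example (added commentary, not part of the
	     original source): "(x & 0xf0) + (y & 0x0f)" adds two masked
	     values whose masks share no bits, so no carries can occur;
	     retrying the expression as "(x & 0xf0) | (y & 0x0f)" exposes
	     the BIT_IOR_EXPR simplifications.  */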
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build2 (pcode, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, parg0),
						   fold_convert (type, marg))),
				     fold_convert (type, parg1)));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build2 (PLUS_EXPR, type,
				     fold_convert (type, parg0),
				     fold (build2 (pcode, type,
						   fold_convert (type, marg),
						   fold_convert (type,
								 parg1)))));
	    }
	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build2 (MULT_EXPR, type, arg00,
					   build_int_cst (NULL_TREE,
							  int01 / int11)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build2 (MULT_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, alt0),
						   fold_convert (type, alt1))),
				     fold_convert (type, same)));
	    }
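	  /* Illustrative example (added commentary, not part of the
	     original source): "i * 24 + j * 8" has no identical
	     multiplicand, but 8 is a power of two dividing 24, so the
	     sum is refactored as "(i * 3 + j) * 8", a shape that often
	     maps onto multi-dimensional array addressing.  */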
	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
	     of the array.  The loop optimizer sometimes produces this type
	     of expression.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
	      if (tem)
		return fold (tem);
	    }
	  else if (TREE_CODE (arg1) == ADDR_EXPR
		   && TREE_CODE (arg0) == MULT_EXPR)
	    {
	      tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
	      if (tem)
		return fold (tem);
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold (build2 (MINUS_EXPR, type,
				     fold_convert (type, arg0),
				     fold_convert (type, tem)));
	    }

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold (build2 (MULT_EXPR, type, arg0,
				 build_real (type, dconst2)));

	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build2 (MULT_EXPR, type, arg1,
				   build_real (type, c)));
	    }

	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build2 (MULT_EXPR, type, arg0,
				   build_real (type, c)));
	    }

	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold (build2 (MULT_EXPR, type,
				   TREE_OPERAND (arg0, 0),
				   build_real (type, c1)));
	    }
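	  /* Illustrative example (added commentary, not part of the
	     original source): under -funsafe-math-optimizations,
	     "x * 2.0 + x * 3.0" matches the x*c1+x*c2 pattern above and
	     folds to "x * 5.0"; without the flag the transformation is
	     suppressed because it can change rounding.  */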
	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
		  return fold (build2 (PLUS_EXPR, type, tree0, tree11));
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
		  return fold (build2 (PLUS_EXPR, type, tree00, tree0));
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

    binary:
      t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != type)
	    t1 = fold_convert (type, t1);

	  return t1;
	}
      return t;
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
			     TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 0))),
				     arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 1))),
				     arg0));
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build2 (BIT_XOR_EXPR, type,
				      TREE_OPERAND (arg0, 0), mask1));
		  return fold (build2 (MINUS_EXPR, type, tem, mask1));
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
	 of the array.  The loop optimizer sometimes produces this type
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR)
	{
	  tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
	  if (tem)
	    return fold (tem);
	}

      if (TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  /* (A * C) - (B * C) -> (A-B) * C.  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
			       TREE_OPERAND (arg1, 1), 0))
	    return fold (build2 (MULT_EXPR, type,
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 0),
					       TREE_OPERAND (arg1, 0))),
				 TREE_OPERAND (arg0, 1)));
	  /* (A * C1) - (A * C2) -> A * (C1-C2).  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0))
	    return fold (build2 (MULT_EXPR, type,
				 TREE_OPERAND (arg0, 0),
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 1),
					       TREE_OPERAND (arg1, 1)))));
	}

      goto associate;
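      /* Illustrative example (added commentary, not part of the
	 original source): "a * n - b * n" matches the first pattern
	 above and becomes "(a - b) * n"; this holds unconditionally
	 for integer operands, while for floats it is done only under
	 -funsafe-math-optimizations.  */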
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (MULT_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (MULT_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg0,
				 TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg1,
				 TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build2 (RDIV_EXPR, type, tem,
				     TREE_OPERAND (arg0, 1)));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build2 (PLUS_EXPR, type,
				     TREE_VALUE (TREE_OPERAND (arg0, 1)),
				     TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
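      /* Illustrative example (added commentary, not part of the
	 original source): under -funsafe-math-optimizations,
	 "pow (x, 2.5) * x" matches the pow(x,c)*x pattern above and
	 becomes "pow (x, 3.5)"; likewise "y * y" for double y is
	 canonicalized to "pow (y, 2.0)", which the expander turns
	 back into a single multiplication.  */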
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_AND_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}
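      /* Illustrative example (added commentary, not part of the
	 original source): "~a | ~b" is rewritten by De Morgan's law to
	 "~(a & b)", saving one BIT_NOT_EXPR; a target with a NAND
	 instruction can then match the combined form directly.  */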
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_IOR_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}

      goto associate;
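      /* Illustrative example (added commentary, not part of the
	 original source): for "unsigned char c", the expression
	 "(int) c & 0377" masks with all ones in the 8-bit precision of
	 the inner operand, so the BIT_AND_EXPR above is redundant and
	 the result is simply "(int) c".  */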
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (RDIV_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (RDIV_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build2 (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build2 (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			     fold (build2 (MULT_EXPR, type,
					   TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build2 (MULT_EXPR, type,
			     fold (build2 (RDIV_EXPR, type, arg0,
					   TREE_OPERAND (arg1, 0))),
			     TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build2 (RDIV_EXPR, type, tem,
				 TREE_OPERAND (arg1, 0)));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build2 (RDIV_EXPR, type,
				       build_real (type, dconst1), tmp));
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return t;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
	 BIT_AND_EXPR, i.e. "X % C" into "X & C2".  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type)
	  && integer_pow2p (arg1))
	{
	  unsigned HOST_WIDE_INT high, low;
	  tree mask;
	  int l;

	  l = tree_log2 (arg1);
	  if (l >= HOST_BITS_PER_WIDE_INT)
	    {
	      high = ((unsigned HOST_WIDE_INT) 1
		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
	      low = -1;
	    }
	  else
	    {
	      high = 0;
	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
	    }

	  mask = build_int_cst_wide (type, low, high);
	  return fold (build2 (BIT_AND_EXPR, type,
			       fold_convert (type, arg0), mask));
	}

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold (build2 (code, type, fold_convert (type, arg0),
			     fold_convert (type, negate_expr (arg1))));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
	return fold (build2 (code, type, fold_convert (type, arg0),
			     fold_convert (type, TREE_OPERAND (arg1, 0))));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
7939 if (integer_all_onesp (arg0
))
7940 return omit_one_operand (type
, arg0
, arg1
);
7944 /* Optimize -1 >> x for arithmetic right shifts. */
7945 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
))
7946 return omit_one_operand (type
, arg0
, arg1
);
7947 /* ... fall through ... */
7951 if (integer_zerop (arg1
))
7952 return non_lvalue (fold_convert (type
, arg0
));
7953 if (integer_zerop (arg0
))
7954 return omit_one_operand (type
, arg0
, arg1
);
7956 /* Since negative shift count is not well-defined,
7957 don't try to compute it in the compiler. */
7958 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
7960 /* Rewrite an LROTATE_EXPR by a constant into an
7961 RROTATE_EXPR by a new constant. */
7962 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
7964 tree tem
= build_int_cst (NULL_TREE
,
7965 GET_MODE_BITSIZE (TYPE_MODE (type
)));
7966 tem
= fold_convert (TREE_TYPE (arg1
), tem
);
7967 tem
= const_binop (MINUS_EXPR
, tem
, arg1
, 0);
7968 return fold (build2 (RROTATE_EXPR
, type
, arg0
, tem
));
7971 /* If we have a rotate of a bit operation with the rotate count and
7972 the second operand of the bit operation both constant,
7973 permute the two operations. */
7974 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
7975 && (TREE_CODE (arg0
) == BIT_AND_EXPR
7976 || TREE_CODE (arg0
) == BIT_IOR_EXPR
7977 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
7978 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
7979 return fold (build2 (TREE_CODE (arg0
), type
,
7980 fold (build2 (code
, type
,
7981 TREE_OPERAND (arg0
, 0), arg1
)),
7982 fold (build2 (code
, type
,
7983 TREE_OPERAND (arg0
, 1), arg1
))));
7985 /* Two consecutive rotates adding up to the width of the mode can
7987 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
7988 && TREE_CODE (arg0
) == RROTATE_EXPR
7989 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7990 && TREE_INT_CST_HIGH (arg1
) == 0
7991 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
7992 && ((TREE_INT_CST_LOW (arg1
)
7993 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
7994 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type
))))
7995 return TREE_OPERAND (arg0
, 0);
8000 if (operand_equal_p (arg0
, arg1
, 0))
8001 return omit_one_operand (type
, arg0
, arg1
);
8002 if (INTEGRAL_TYPE_P (type
)
8003 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
8004 return omit_one_operand (type
, arg1
, arg0
);
8008 if (operand_equal_p (arg0
, arg1
, 0))
8009 return omit_one_operand (type
, arg0
, arg1
);
8010 if (INTEGRAL_TYPE_P (type
)
8011 && TYPE_MAX_VALUE (type
)
8012 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
8013 return omit_one_operand (type
, arg1
, arg0
);
8016 case TRUTH_NOT_EXPR
:
8017 /* The argument to invert_truthvalue must have Boolean type. */
8018 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8019 arg0
= fold_convert (boolean_type_node
, arg0
);
8021 /* Note that the operand of this must be an int
8022 and its values must be 0 or 1.
8023 ("true" is a fixed value perhaps depending on the language,
8024 but we don't handle values other than 1 correctly yet.) */
8025 tem
= invert_truthvalue (arg0
);
8026 /* Avoid infinite recursion. */
8027 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
8029 tem
= fold_single_bit_test (code
, arg0
, arg1
, type
);
8034 return fold_convert (type
, tem
);
8036 case TRUTH_ANDIF_EXPR
:
8037 /* Note that the operands of this must be ints
8038 and their values must be 0 or 1.
8039 ("true" is a fixed value perhaps depending on the language.) */
8040 /* If first arg is constant zero, return it. */
8041 if (integer_zerop (arg0
))
8042 return fold_convert (type
, arg0
);
8043 case TRUTH_AND_EXPR
:
8044 /* If either arg is constant true, drop it. */
8045 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8046 return non_lvalue (fold_convert (type
, arg1
));
8047 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
8048 /* Preserve sequence points. */
8049 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
8050 return non_lvalue (fold_convert (type
, arg0
));
8051 /* If second arg is constant zero, result is zero, but first arg
8052 must be evaluated. */
8053 if (integer_zerop (arg1
))
8054 return omit_one_operand (type
, arg1
, arg0
);
8055 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8056 case will be handled here. */
8057 if (integer_zerop (arg0
))
8058 return omit_one_operand (type
, arg0
, arg1
);
8060 /* !X && X is always false. */
8061 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8062 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8063 return omit_one_operand (type
, integer_zero_node
, arg1
);
8064 /* X && !X is always false. */
8065 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8066 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8067 return omit_one_operand (type
, integer_zero_node
, arg0
);
8069 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8070 means A >= Y && A != MAX, but in this case we know that
8073 if (!TREE_SIDE_EFFECTS (arg0
)
8074 && !TREE_SIDE_EFFECTS (arg1
))
8076 tem
= fold_to_nonsharp_ineq_using_bound (arg0
, arg1
);
8078 return fold (build2 (code
, type
, tem
, arg1
));
8080 tem
= fold_to_nonsharp_ineq_using_bound (arg1
, arg0
);
8082 return fold (build2 (code
, type
, arg0
, tem
));
8086 /* We only do these simplifications if we are optimizing. */
8090 /* Check for things like (A || B) && (A || C). We can convert this
8091 to A || (B && C). Note that either operator can be any of the four
8092 truth and/or operations and the transformation will still be
8093 valid. Also note that we only care about order for the
8094 ANDIF and ORIF operators. If B contains side effects, this
8095 might change the truth-value of A. */
8096 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8097 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8098 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8099 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8100 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8101 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8103 tree a00
= TREE_OPERAND (arg0
, 0);
8104 tree a01
= TREE_OPERAND (arg0
, 1);
8105 tree a10
= TREE_OPERAND (arg1
, 0);
8106 tree a11
= TREE_OPERAND (arg1
, 1);
8107 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8108 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8109 && (code
== TRUTH_AND_EXPR
8110 || code
== TRUTH_OR_EXPR
));
8112 if (operand_equal_p (a00
, a10
, 0))
8113 return fold (build2 (TREE_CODE (arg0
), type
, a00
,
8114 fold (build2 (code
, type
, a01
, a11
))));
8115 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8116 return fold (build2 (TREE_CODE (arg0
), type
, a00
,
8117 fold (build2 (code
, type
, a01
, a10
))));
8118 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8119 return fold (build2 (TREE_CODE (arg0
), type
, a01
,
8120 fold (build2 (code
, type
, a00
, a11
))));
8122 /* This case if tricky because we must either have commutative
8123 operators or else A10 must not have side-effects. */
8125 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8126 && operand_equal_p (a01
, a11
, 0))
8127 return fold (build2 (TREE_CODE (arg0
), type
,
8128 fold (build2 (code
, type
, a00
, a10
)),
8132 /* See if we can build a range comparison. */
8133 if (0 != (tem
= fold_range_test (t
)))
8136 /* Check for the possibility of merging component references. If our
8137 lhs is another similar operation, try to merge its rhs with our
8138 rhs. Then try to merge our lhs and rhs. */
8139 if (TREE_CODE (arg0
) == code
8140 && 0 != (tem
= fold_truthop (code
, type
,
8141 TREE_OPERAND (arg0
, 1), arg1
)))
8142 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
8144 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
8149 case TRUTH_ORIF_EXPR
:
8150 /* Note that the operands of this must be ints
8151 and their values must be 0 or true.
8152 ("true" is a fixed value perhaps depending on the language.) */
8153 /* If first arg is constant true, return it. */
8154 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8155 return fold_convert (type
, arg0
);
8157 /* If either arg is constant zero, drop it. */
8158 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
8159 return non_lvalue (fold_convert (type
, arg1
));
8160 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
8161 /* Preserve sequence points. */
8162 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
8163 return non_lvalue (fold_convert (type
, arg0
));
8164 /* If second arg is constant true, result is true, but we must
8165 evaluate first arg. */
8166 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
8167 return omit_one_operand (type
, arg1
, arg0
);
8168 /* Likewise for first arg, but note this only occurs here for
8170 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
8171 return omit_one_operand (type
, arg0
, arg1
);
8173 /* !X || X is always true. */
8174 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8175 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8176 return omit_one_operand (type
, integer_one_node
, arg1
);
8177 /* X || !X is always true. */
8178 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8179 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8180 return omit_one_operand (type
, integer_one_node
, arg0
);
8184 case TRUTH_XOR_EXPR
:
8185 /* If the second arg is constant zero, drop it. */
8186 if (integer_zerop (arg1
))
8187 return non_lvalue (fold_convert (type
, arg0
));
8188 /* If the second arg is constant true, this is a logical inversion. */
8189 if (integer_onep (arg1
))
8190 return non_lvalue (fold_convert (type
, invert_truthvalue (arg0
)));
8191 /* Identical arguments cancel to zero. */
8192 if (operand_equal_p (arg0
, arg1
, 0))
8193 return omit_one_operand (type
, integer_zero_node
, arg0
);
8195 /* !X ^ X is always true. */
8196 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
8197 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
8198 return omit_one_operand (type
, integer_one_node
, arg1
);
8200 /* X ^ !X is always true. */
8201 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
8202 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
8203 return omit_one_operand (type
, integer_one_node
, arg0
);
8213 /* If one arg is a real or integer constant, put it last. */
8214 if (tree_swap_operands_p (arg0
, arg1
, true))
8215 return fold (build2 (swap_tree_comparison (code
), type
, arg1
, arg0
));
8217 /* If this is an equality comparison of the address of a non-weak
8218 object against zero, then we know the result. */
8219 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8220 && TREE_CODE (arg0
) == ADDR_EXPR
8221 && DECL_P (TREE_OPERAND (arg0
, 0))
8222 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
8223 && integer_zerop (arg1
))
8224 return constant_boolean_node (code
!= EQ_EXPR
, type
);
8226 /* If this is an equality comparison of the address of two non-weak,
8227 unaliased symbols neither of which are extern (since we do not
8228 have access to attributes for externs), then we know the result. */
8229 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8230 && TREE_CODE (arg0
) == ADDR_EXPR
8231 && DECL_P (TREE_OPERAND (arg0
, 0))
8232 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
8233 && ! lookup_attribute ("alias",
8234 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
8235 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
8236 && TREE_CODE (arg1
) == ADDR_EXPR
8237 && DECL_P (TREE_OPERAND (arg1
, 0))
8238 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
8239 && ! lookup_attribute ("alias",
8240 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
8241 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
8242 return constant_boolean_node (operand_equal_p (arg0
, arg1
, 0)
8243 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
8246 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
8248 tree targ0
= strip_float_extensions (arg0
);
8249 tree targ1
= strip_float_extensions (arg1
);
8250 tree newtype
= TREE_TYPE (targ0
);
8252 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
8253 newtype
= TREE_TYPE (targ1
);
8255 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8256 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
8257 return fold (build2 (code
, type
, fold_convert (newtype
, targ0
),
8258 fold_convert (newtype
, targ1
)));
8260 /* (-a) CMP (-b) -> b CMP a */
8261 if (TREE_CODE (arg0
) == NEGATE_EXPR
8262 && TREE_CODE (arg1
) == NEGATE_EXPR
)
8263 return fold (build2 (code
, type
, TREE_OPERAND (arg1
, 0),
8264 TREE_OPERAND (arg0
, 0)));
8266 if (TREE_CODE (arg1
) == REAL_CST
)
8268 REAL_VALUE_TYPE cst
;
8269 cst
= TREE_REAL_CST (arg1
);
8271 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8272 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8274 fold (build2 (swap_tree_comparison (code
), type
,
8275 TREE_OPERAND (arg0
, 0),
8276 build_real (TREE_TYPE (arg1
),
8277 REAL_VALUE_NEGATE (cst
))));
8279 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8280 /* a CMP (-0) -> a CMP 0 */
8281 if (REAL_VALUE_MINUS_ZERO (cst
))
8282 return fold (build2 (code
, type
, arg0
,
8283 build_real (TREE_TYPE (arg1
), dconst0
)));
8285 /* x != NaN is always true, other ops are always false. */
8286 if (REAL_VALUE_ISNAN (cst
)
8287 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
8289 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
8290 return omit_one_operand (type
, tem
, arg0
);
8293 /* Fold comparisons against infinity. */
8294 if (REAL_VALUE_ISINF (cst
))
8296 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
8297 if (tem
!= NULL_TREE
)
8302 /* If this is a comparison of a real constant with a PLUS_EXPR
8303 or a MINUS_EXPR of a real constant, we can convert it into a
8304 comparison with a revised real constant as long as no overflow
8305 occurs when unsafe_math_optimizations are enabled. */
8306 if (flag_unsafe_math_optimizations
8307 && TREE_CODE (arg1
) == REAL_CST
8308 && (TREE_CODE (arg0
) == PLUS_EXPR
8309 || TREE_CODE (arg0
) == MINUS_EXPR
)
8310 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
8311 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
8312 ? MINUS_EXPR
: PLUS_EXPR
,
8313 arg1
, TREE_OPERAND (arg0
, 1), 0))
8314 && ! TREE_CONSTANT_OVERFLOW (tem
))
8315 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
8317 /* Likewise, we can simplify a comparison of a real constant with
8318 a MINUS_EXPR whose first operand is also a real constant, i.e.
8319 (c1 - x) < c2 becomes x > c1-c2. */
8320 if (flag_unsafe_math_optimizations
8321 && TREE_CODE (arg1
) == REAL_CST
8322 && TREE_CODE (arg0
) == MINUS_EXPR
8323 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
8324 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
8326 && ! TREE_CONSTANT_OVERFLOW (tem
))
8327 return fold (build2 (swap_tree_comparison (code
), type
,
8328 TREE_OPERAND (arg0
, 1), tem
));
8330 /* Fold comparisons against built-in math functions. */
8331 if (TREE_CODE (arg1
) == REAL_CST
8332 && flag_unsafe_math_optimizations
8333 && ! flag_errno_math
)
8335 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8337 if (fcode
!= END_BUILTINS
)
8339 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
8340 if (tem
!= NULL_TREE
)
8346 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8347 if (TREE_CONSTANT (arg1
)
8348 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
8349 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
8350 /* This optimization is invalid for ordered comparisons
8351 if CONST+INCR overflows or if foo+incr might overflow.
8352 This optimization is invalid for floating point due to rounding.
8353 For pointer types we assume overflow doesn't happen. */
8354 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
8355 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8356 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
8358 tree varop
, newconst
;
8360 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
8362 newconst
= fold (build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
8363 arg1
, TREE_OPERAND (arg0
, 1)));
8364 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
8365 TREE_OPERAND (arg0
, 0),
8366 TREE_OPERAND (arg0
, 1));
8370 newconst
= fold (build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
8371 arg1
, TREE_OPERAND (arg0
, 1)));
8372 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
8373 TREE_OPERAND (arg0
, 0),
8374 TREE_OPERAND (arg0
, 1));
8378 /* If VAROP is a reference to a bitfield, we must mask
8379 the constant by the width of the field. */
8380 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
8381 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
8382 && host_integerp (DECL_SIZE (TREE_OPERAND
8383 (TREE_OPERAND (varop
, 0), 1)), 1))
8385 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
8386 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
8387 tree folded_compare
, shift
;
8389 /* First check whether the comparison would come out
8390 always the same. If we don't do that we would
8391 change the meaning with the masking. */
8392 folded_compare
= fold (build2 (code
, type
,
8393 TREE_OPERAND (varop
, 0), arg1
));
8394 if (integer_zerop (folded_compare
)
8395 || integer_onep (folded_compare
))
8396 return omit_one_operand (type
, folded_compare
, varop
);
8398 shift
= build_int_cst (NULL_TREE
,
8399 TYPE_PRECISION (TREE_TYPE (varop
)) - size
);
8400 shift
= fold_convert (TREE_TYPE (varop
), shift
);
8401 newconst
= fold (build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
8403 newconst
= fold (build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
8407 return fold (build2 (code
, type
, varop
, newconst
));
8410 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8411 This transformation affects the cases which are handled in later
8412 optimizations involving comparisons with non-negative constants. */
8413 if (TREE_CODE (arg1
) == INTEGER_CST
8414 && TREE_CODE (arg0
) != INTEGER_CST
8415 && tree_int_cst_sgn (arg1
) > 0)
8420 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8421 return fold (build2 (GT_EXPR
, type
, arg0
, arg1
));
8424 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8425 return fold (build2 (LE_EXPR
, type
, arg0
, arg1
));
8432 /* Comparisons with the highest or lowest possible integer of
8433 the specified size will have known values.
8435 This is quite similar to fold_relational_hi_lo, however,
8436 attempts to share the code have been nothing but trouble. */
8438 int width
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1
)));
8440 if (TREE_CODE (arg1
) == INTEGER_CST
8441 && ! TREE_CONSTANT_OVERFLOW (arg1
)
8442 && width
<= HOST_BITS_PER_WIDE_INT
8443 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
8444 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
8446 unsigned HOST_WIDE_INT signed_max
;
8447 unsigned HOST_WIDE_INT max
, min
;
8449 signed_max
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1)) - 1;
8451 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
8453 max
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
8459 min
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
8462 if (TREE_INT_CST_HIGH (arg1
) == 0
8463 && TREE_INT_CST_LOW (arg1
) == max
)
8467 return omit_one_operand (type
, integer_zero_node
, arg0
);
8470 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8473 return omit_one_operand (type
, integer_one_node
, arg0
);
8476 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8478 /* The GE_EXPR and LT_EXPR cases above are not normally
8479 reached because of previous transformations. */
8484 else if (TREE_INT_CST_HIGH (arg1
) == 0
8485 && TREE_INT_CST_LOW (arg1
) == max
- 1)
8489 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
8490 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8492 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
8493 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8497 else if (TREE_INT_CST_HIGH (arg1
) == (min
? -1 : 0)
8498 && TREE_INT_CST_LOW (arg1
) == min
)
8502 return omit_one_operand (type
, integer_zero_node
, arg0
);
8505 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8508 return omit_one_operand (type
, integer_one_node
, arg0
);
8511 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8516 else if (TREE_INT_CST_HIGH (arg1
) == (min
? -1 : 0)
8517 && TREE_INT_CST_LOW (arg1
) == min
+ 1)
8521 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8522 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8524 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8525 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8530 else if (!in_gimple_form
8531 && TREE_INT_CST_HIGH (arg1
) == 0
8532 && TREE_INT_CST_LOW (arg1
) == signed_max
8533 && TYPE_UNSIGNED (TREE_TYPE (arg1
))
8534 /* signed_type does not work on pointer types. */
8535 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
8537 /* The following case also applies to X < signed_max+1
8538 and X >= signed_max+1 because previous transformations. */
8539 if (code
== LE_EXPR
|| code
== GT_EXPR
)
8542 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
8543 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
8545 (build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
8546 type
, fold_convert (st0
, arg0
),
8547 fold_convert (st1
, integer_zero_node
)));
8553 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8554 a MINUS_EXPR of a constant, we can convert it into a comparison with
8555 a revised constant as long as no overflow occurs. */
8556 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8557 && TREE_CODE (arg1
) == INTEGER_CST
8558 && (TREE_CODE (arg0
) == PLUS_EXPR
8559 || TREE_CODE (arg0
) == MINUS_EXPR
)
8560 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8561 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
8562 ? MINUS_EXPR
: PLUS_EXPR
,
8563 arg1
, TREE_OPERAND (arg0
, 1), 0))
8564 && ! TREE_CONSTANT_OVERFLOW (tem
))
8565 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
8567 /* Similarly for a NEGATE_EXPR. */
8568 else if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8569 && TREE_CODE (arg0
) == NEGATE_EXPR
8570 && TREE_CODE (arg1
) == INTEGER_CST
8571 && 0 != (tem
= negate_expr (arg1
))
8572 && TREE_CODE (tem
) == INTEGER_CST
8573 && ! TREE_CONSTANT_OVERFLOW (tem
))
8574 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
8576 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8577 for !=. Don't do this for ordered comparisons due to overflow. */
8578 else if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
8579 && integer_zerop (arg1
) && TREE_CODE (arg0
) == MINUS_EXPR
)
8580 return fold (build2 (code
, type
,
8581 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1)));
8583 else if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
8584 && TREE_CODE (arg0
) == NOP_EXPR
)
8586 /* If we are widening one operand of an integer comparison,
8587 see if the other operand is similarly being widened. Perhaps we
8588 can do the comparison in the narrower type. */
8589 tem
= fold_widened_comparison (code
, type
, arg0
, arg1
);
8593 /* Or if we are changing signedness. */
8594 tem
= fold_sign_changed_comparison (code
, type
, arg0
, arg1
);
8599 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8600 constant, we can simplify it. */
8601 else if (TREE_CODE (arg1
) == INTEGER_CST
8602 && (TREE_CODE (arg0
) == MIN_EXPR
8603 || TREE_CODE (arg0
) == MAX_EXPR
)
8604 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8605 return optimize_minmax_comparison (t
);
8607 /* If we are comparing an ABS_EXPR with a constant, we can
8608 convert all the cases into explicit comparisons, but they may
8609 well not be faster than doing the ABS and one comparison.
8610 But ABS (X) <= C is a range comparison, which becomes a subtraction
8611 and a comparison, and is probably faster. */
8612 else if (code
== LE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
8613 && TREE_CODE (arg0
) == ABS_EXPR
8614 && ! TREE_SIDE_EFFECTS (arg0
)
8615 && (0 != (tem
= negate_expr (arg1
)))
8616 && TREE_CODE (tem
) == INTEGER_CST
8617 && ! TREE_CONSTANT_OVERFLOW (tem
))
8618 return fold (build2 (TRUTH_ANDIF_EXPR
, type
,
8619 build2 (GE_EXPR
, type
,
8620 TREE_OPERAND (arg0
, 0), tem
),
8621 build2 (LE_EXPR
, type
,
8622 TREE_OPERAND (arg0
, 0), arg1
)));
8624 /* If this is an EQ or NE comparison with zero and ARG0 is
8625 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8626 two operations, but the latter can be done in one less insn
8627 on machines that have only two-operand insns or on which a
8628 constant cannot be the first operand. */
8629 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
8630 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
8632 tree arg00
= TREE_OPERAND (arg0
, 0);
8633 tree arg01
= TREE_OPERAND (arg0
, 1);
8634 if (TREE_CODE (arg00
) == LSHIFT_EXPR
8635 && integer_onep (TREE_OPERAND (arg00
, 0)))
8637 fold (build2 (code
, type
,
8638 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8639 build2 (RSHIFT_EXPR
, TREE_TYPE (arg00
),
8640 arg01
, TREE_OPERAND (arg00
, 1)),
8641 fold_convert (TREE_TYPE (arg0
),
8644 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
8645 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
8647 fold (build2 (code
, type
,
8648 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8649 build2 (RSHIFT_EXPR
, TREE_TYPE (arg01
),
8650 arg00
, TREE_OPERAND (arg01
, 1)),
8651 fold_convert (TREE_TYPE (arg0
),
8656 /* If this is an NE or EQ comparison of zero against the result of a
8657 signed MOD operation whose second operand is a power of 2, make
8658 the MOD operation unsigned since it is simpler and equivalent. */
8659 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
8660 && integer_zerop (arg1
)
8661 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
8662 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
8663 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
8664 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
8665 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
8666 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
8668 tree newtype
= lang_hooks
.types
.unsigned_type (TREE_TYPE (arg0
));
8669 tree newmod
= fold (build2 (TREE_CODE (arg0
), newtype
,
8670 fold_convert (newtype
,
8671 TREE_OPERAND (arg0
, 0)),
8672 fold_convert (newtype
,
8673 TREE_OPERAND (arg0
, 1))));
8675 return fold (build2 (code
, type
, newmod
,
8676 fold_convert (newtype
, arg1
)));
8679 /* If this is an NE comparison of zero with an AND of one, remove the
8680 comparison since the AND will give the correct value. */
8681 if (code
== NE_EXPR
&& integer_zerop (arg1
)
8682 && TREE_CODE (arg0
) == BIT_AND_EXPR
8683 && integer_onep (TREE_OPERAND (arg0
, 1)))
8684 return fold_convert (type
, arg0
);
8686 /* If we have (A & C) == C where C is a power of 2, convert this into
8687 (A & C) != 0. Similarly for NE_EXPR. */
8688 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8689 && TREE_CODE (arg0
) == BIT_AND_EXPR
8690 && integer_pow2p (TREE_OPERAND (arg0
, 1))
8691 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
8692 return fold (build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
8693 arg0
, fold_convert (TREE_TYPE (arg0
),
8694 integer_zero_node
)));
8696 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8697 2, then fold the expression into shifts and logical operations. */
8698 tem
= fold_single_bit_test (code
, arg0
, arg1
, type
);
8702 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8703 Similarly for NE_EXPR. */
8704 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8705 && TREE_CODE (arg0
) == BIT_AND_EXPR
8706 && TREE_CODE (arg1
) == INTEGER_CST
8707 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8709 tree notc
= fold (build1 (BIT_NOT_EXPR
,
8710 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
8711 TREE_OPERAND (arg0
, 1)));
8712 tree dandnotc
= fold (build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8714 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
8715 if (integer_nonzerop (dandnotc
))
8716 return omit_one_operand (type
, rslt
, arg0
);
8719 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8720 Similarly for NE_EXPR. */
8721 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8722 && TREE_CODE (arg0
) == BIT_IOR_EXPR
8723 && TREE_CODE (arg1
) == INTEGER_CST
8724 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8726 tree notd
= fold (build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
));
8727 tree candnotd
= fold (build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8728 TREE_OPERAND (arg0
, 1), notd
));
8729 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
8730 if (integer_nonzerop (candnotd
))
8731 return omit_one_operand (type
, rslt
, arg0
);
8734 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8735 and similarly for >= into !=. */
8736 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
8737 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
8738 && TREE_CODE (arg1
) == LSHIFT_EXPR
8739 && integer_onep (TREE_OPERAND (arg1
, 0)))
8740 return build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
8741 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
8742 TREE_OPERAND (arg1
, 1)),
8743 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
8745 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
8746 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
8747 && (TREE_CODE (arg1
) == NOP_EXPR
8748 || TREE_CODE (arg1
) == CONVERT_EXPR
)
8749 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
8750 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
8752 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
8753 fold_convert (TREE_TYPE (arg0
),
8754 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
8755 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
8757 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
8759 /* Simplify comparison of something with itself. (For IEEE
8760 floating-point, we can only do some of these simplifications.) */
8761 if (operand_equal_p (arg0
, arg1
, 0))
8766 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8767 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8768 return constant_boolean_node (1, type
);
8773 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8774 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8775 return constant_boolean_node (1, type
);
8776 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8779 /* For NE, we can only do this simplification if integer
8780 or we don't honor IEEE floating point NaNs. */
8781 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
8782 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8784 /* ... fall through ... */
8787 return constant_boolean_node (0, type
);
8793 /* If we are comparing an expression that just has comparisons
8794 of two integer values, arithmetic expressions of those comparisons,
8795 and constants, we can simplify it. There are only three cases
8796 to check: the two values can either be equal, the first can be
8797 greater, or the second can be greater. Fold the expression for
8798 those three values. Since each value must be 0 or 1, we have
8799 eight possibilities, each of which corresponds to the constant 0
8800 or 1 or one of the six possible comparisons.
8802 This handles common cases like (a > b) == 0 but also handles
8803 expressions like ((x > y) - (y > x)) > 0, which supposedly
8804 occur in macroized code. */
8806 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8808 tree cval1
= 0, cval2
= 0;
8811 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8812 /* Don't handle degenerate cases here; they should already
8813 have been handled anyway. */
8814 && cval1
!= 0 && cval2
!= 0
8815 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8816 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8817 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8818 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8819 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8820 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8821 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8823 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8824 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8826 /* We can't just pass T to eval_subst in case cval1 or cval2
8827 was the same as ARG1. */
8830 = fold (build2 (code
, type
,
8831 eval_subst (arg0
, cval1
, maxval
,
8835 = fold (build2 (code
, type
,
8836 eval_subst (arg0
, cval1
, maxval
,
8840 = fold (build2 (code
, type
,
8841 eval_subst (arg0
, cval1
, minval
,
8845 /* All three of these results should be 0 or 1. Confirm they
8846 are. Then use those values to select the proper code
8849 if ((integer_zerop (high_result
)
8850 || integer_onep (high_result
))
8851 && (integer_zerop (equal_result
)
8852 || integer_onep (equal_result
))
8853 && (integer_zerop (low_result
)
8854 || integer_onep (low_result
)))
8856 /* Make a 3-bit mask with the high-order bit being the
8857 value for `>', the next for '=', and the low for '<'. */
8858 switch ((integer_onep (high_result
) * 4)
8859 + (integer_onep (equal_result
) * 2)
8860 + integer_onep (low_result
))
8864 return omit_one_operand (type
, integer_zero_node
, arg0
);
8885 return omit_one_operand (type
, integer_one_node
, arg0
);
8888 tem
= build2 (code
, type
, cval1
, cval2
);
8890 return save_expr (tem
);
8897 /* If this is a comparison of a field, we may be able to simplify it. */
8898 if (((TREE_CODE (arg0
) == COMPONENT_REF
8899 && lang_hooks
.can_use_bit_fields_p ())
8900 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
8901 && (code
== EQ_EXPR
|| code
== NE_EXPR
)
8902 /* Handle the constant case even without -O
8903 to make sure the warnings are given. */
8904 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
8906 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
8911 /* If this is a comparison of complex values and either or both sides
8912 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8913 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8914 This may prevent needless evaluations. */
8915 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8916 && TREE_CODE (TREE_TYPE (arg0
)) == COMPLEX_TYPE
8917 && (TREE_CODE (arg0
) == COMPLEX_EXPR
8918 || TREE_CODE (arg1
) == COMPLEX_EXPR
8919 || TREE_CODE (arg0
) == COMPLEX_CST
8920 || TREE_CODE (arg1
) == COMPLEX_CST
))
8922 tree subtype
= TREE_TYPE (TREE_TYPE (arg0
));
8923 tree real0
, imag0
, real1
, imag1
;
8925 arg0
= save_expr (arg0
);
8926 arg1
= save_expr (arg1
);
8927 real0
= fold (build1 (REALPART_EXPR
, subtype
, arg0
));
8928 imag0
= fold (build1 (IMAGPART_EXPR
, subtype
, arg0
));
8929 real1
= fold (build1 (REALPART_EXPR
, subtype
, arg1
));
8930 imag1
= fold (build1 (IMAGPART_EXPR
, subtype
, arg1
));
8932 return fold (build2 ((code
== EQ_EXPR
? TRUTH_ANDIF_EXPR
8935 fold (build2 (code
, type
, real0
, real1
)),
8936 fold (build2 (code
, type
, imag0
, imag1
))));
8939 /* Optimize comparisons of strlen vs zero to a compare of the
8940 first character of the string vs zero. To wit,
8941 strlen(ptr) == 0 => *ptr == 0
8942 strlen(ptr) != 0 => *ptr != 0
8943 Other cases should reduce to one of these two (or a constant)
8944 due to the return value of strlen being unsigned. */
8945 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8946 && integer_zerop (arg1
)
8947 && TREE_CODE (arg0
) == CALL_EXPR
)
8949 tree fndecl
= get_callee_fndecl (arg0
);
8953 && DECL_BUILT_IN (fndecl
)
8954 && DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_MD
8955 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
8956 && (arglist
= TREE_OPERAND (arg0
, 1))
8957 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
8958 && ! TREE_CHAIN (arglist
))
8959 return fold (build2 (code
, type
,
8960 build1 (INDIRECT_REF
, char_type_node
,
8961 TREE_VALUE (arglist
)),
8962 fold_convert (char_type_node
,
8963 integer_zero_node
)));
8966 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8967 into a single range test. */
8968 if (TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8969 && TREE_CODE (arg1
) == INTEGER_CST
8970 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8971 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8972 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8973 && !TREE_OVERFLOW (arg1
))
8975 t1
= fold_div_compare (code
, type
, arg0
, arg1
);
8976 if (t1
!= NULL_TREE
)
8980 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8981 && !TREE_SIDE_EFFECTS (arg0
)
8982 && integer_zerop (arg1
)
8983 && tree_expr_nonzero_p (arg0
))
8984 return constant_boolean_node (code
==NE_EXPR
, type
);
8986 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
8987 return t1
== NULL_TREE
? t
: t1
;
8989 case UNORDERED_EXPR
:
8997 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
8999 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
9000 if (t1
!= NULL_TREE
)
9004 /* If the first operand is NaN, the result is constant. */
9005 if (TREE_CODE (arg0
) == REAL_CST
9006 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
9007 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
9009 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
9012 return omit_one_operand (type
, t1
, arg1
);
9015 /* If the second operand is NaN, the result is constant. */
9016 if (TREE_CODE (arg1
) == REAL_CST
9017 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
9018 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
9020 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
9023 return omit_one_operand (type
, t1
, arg0
);
9026 /* Simplify unordered comparison of something with itself. */
9027 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
9028 && operand_equal_p (arg0
, arg1
, 0))
9029 return constant_boolean_node (1, type
);
9031 if (code
== LTGT_EXPR
9032 && !flag_trapping_math
9033 && operand_equal_p (arg0
, arg1
, 0))
9034 return constant_boolean_node (0, type
);
9036 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9038 tree targ0
= strip_float_extensions (arg0
);
9039 tree targ1
= strip_float_extensions (arg1
);
9040 tree newtype
= TREE_TYPE (targ0
);
9042 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9043 newtype
= TREE_TYPE (targ1
);
9045 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9046 return fold (build2 (code
, type
, fold_convert (newtype
, targ0
),
9047 fold_convert (newtype
, targ1
)));
9053 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9054 so all simple results must be passed through pedantic_non_lvalue. */
9055 if (TREE_CODE (arg0
) == INTEGER_CST
)
9057 tem
= TREE_OPERAND (t
, (integer_zerop (arg0
) ? 2 : 1));
9058 /* Only optimize constant conditions when the selected branch
9059 has the same type as the COND_EXPR. This avoids optimizing
9060 away "c ? x : throw", where the throw has a void type. */
9061 if (! VOID_TYPE_P (TREE_TYPE (tem
))
9062 || VOID_TYPE_P (type
))
9063 return pedantic_non_lvalue (tem
);
9066 if (operand_equal_p (arg1
, TREE_OPERAND (t
, 2), 0))
9067 return pedantic_omit_one_operand (type
, arg1
, arg0
);
9069 /* If we have A op B ? A : C, we may be able to convert this to a
9070 simpler expression, depending on the operation and the values
9071 of B and C. Signed zeros prevent all of these transformations,
9072 for reasons given above each one.
9074 Also try swapping the arguments and inverting the conditional. */
9075 if (COMPARISON_CLASS_P (arg0
)
9076 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
9077 arg1
, TREE_OPERAND (arg0
, 1))
9078 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
9080 tem
= fold_cond_expr_with_comparison (type
, arg0
,
9081 TREE_OPERAND (t
, 1),
9082 TREE_OPERAND (t
, 2));
9087 if (COMPARISON_CLASS_P (arg0
)
9088 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
9089 TREE_OPERAND (t
, 2),
9090 TREE_OPERAND (arg0
, 1))
9091 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 2)))))
9093 tem
= invert_truthvalue (arg0
);
9094 if (COMPARISON_CLASS_P (tem
))
9096 tem
= fold_cond_expr_with_comparison (type
, tem
,
9097 TREE_OPERAND (t
, 2),
9098 TREE_OPERAND (t
, 1));
9104 /* If the second operand is simpler than the third, swap them
9105 since that produces better jump optimization results. */
9106 if (tree_swap_operands_p (TREE_OPERAND (t
, 1),
9107 TREE_OPERAND (t
, 2), false))
9109 /* See if this can be inverted. If it can't, possibly because
9110 it was a floating-point inequality comparison, don't do
9112 tem
= invert_truthvalue (arg0
);
9114 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
9115 return fold (build3 (code
, type
, tem
,
9116 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1)));
9119 /* Convert A ? 1 : 0 to simply A. */
9120 if (integer_onep (TREE_OPERAND (t
, 1))
9121 && integer_zerop (TREE_OPERAND (t
, 2))
9122 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9123 call to fold will try to move the conversion inside
9124 a COND, which will recurse. In that case, the COND_EXPR
9125 is probably the best choice, so leave it alone. */
9126 && type
== TREE_TYPE (arg0
))
9127 return pedantic_non_lvalue (arg0
);
9129 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9130 over COND_EXPR in cases such as floating point comparisons. */
9131 if (integer_zerop (TREE_OPERAND (t
, 1))
9132 && integer_onep (TREE_OPERAND (t
, 2))
9133 && truth_value_p (TREE_CODE (arg0
)))
9134 return pedantic_non_lvalue (fold_convert (type
,
9135 invert_truthvalue (arg0
)));
9137 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9138 if (TREE_CODE (arg0
) == LT_EXPR
9139 && integer_zerop (TREE_OPERAND (arg0
, 1))
9140 && integer_zerop (TREE_OPERAND (t
, 2))
9141 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
9142 return fold_convert (type
, fold (build2 (BIT_AND_EXPR
,
9143 TREE_TYPE (tem
), tem
, arg1
)));
9145 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9146 already handled above. */
9147 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9148 && integer_onep (TREE_OPERAND (arg0
, 1))
9149 && integer_zerop (TREE_OPERAND (t
, 2))
9150 && integer_pow2p (arg1
))
9152 tree tem
= TREE_OPERAND (arg0
, 0);
9154 if (TREE_CODE (tem
) == RSHIFT_EXPR
9155 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
9156 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
9157 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
9158 return fold (build2 (BIT_AND_EXPR
, type
,
9159 TREE_OPERAND (tem
, 0), arg1
));
9162 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9163 is probably obsolete because the first operand should be a
9164 truth value (that's why we have the two cases above), but let's
9165 leave it in until we can confirm this for all front-ends. */
9166 if (integer_zerop (TREE_OPERAND (t
, 2))
9167 && TREE_CODE (arg0
) == NE_EXPR
9168 && integer_zerop (TREE_OPERAND (arg0
, 1))
9169 && integer_pow2p (arg1
)
9170 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
9171 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
9172 arg1
, OEP_ONLY_CONST
))
9173 return pedantic_non_lvalue (fold_convert (type
,
9174 TREE_OPERAND (arg0
, 0)));
9176 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9177 if (integer_zerop (TREE_OPERAND (t
, 2))
9178 && truth_value_p (TREE_CODE (arg0
))
9179 && truth_value_p (TREE_CODE (arg1
)))
9180 return fold (build2 (TRUTH_ANDIF_EXPR
, type
, arg0
, arg1
));
9182 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9183 if (integer_onep (TREE_OPERAND (t
, 2))
9184 && truth_value_p (TREE_CODE (arg0
))
9185 && truth_value_p (TREE_CODE (arg1
)))
9187 /* Only perform transformation if ARG0 is easily inverted. */
9188 tem
= invert_truthvalue (arg0
);
9189 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
9190 return fold (build2 (TRUTH_ORIF_EXPR
, type
, tem
, arg1
));
9193 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9194 if (integer_zerop (arg1
)
9195 && truth_value_p (TREE_CODE (arg0
))
9196 && truth_value_p (TREE_CODE (TREE_OPERAND (t
, 2))))
9198 /* Only perform transformation if ARG0 is easily inverted. */
9199 tem
= invert_truthvalue (arg0
);
9200 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
9201 return fold (build2 (TRUTH_ANDIF_EXPR
, type
, tem
,
9202 TREE_OPERAND (t
, 2)));
9205 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9206 if (integer_onep (arg1
)
9207 && truth_value_p (TREE_CODE (arg0
))
9208 && truth_value_p (TREE_CODE (TREE_OPERAND (t
, 2))))
9209 return fold (build2 (TRUTH_ORIF_EXPR
, type
, arg0
,
9210 TREE_OPERAND (t
, 2)));
9215 /* When pedantic, a compound expression can be neither an lvalue
9216 nor an integer constant expression. */
9217 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
9219 /* Don't let (0, 0) be null pointer constant. */
9220 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
9221 : fold_convert (type
, arg1
);
9222 return pedantic_non_lvalue (tem
);
9226 return build_complex (type
, arg0
, arg1
);
9230 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
9232 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
9233 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
9234 TREE_OPERAND (arg0
, 1));
9235 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
9236 return TREE_REALPART (arg0
);
9237 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9238 return fold (build2 (TREE_CODE (arg0
), type
,
9239 fold (build1 (REALPART_EXPR
, type
,
9240 TREE_OPERAND (arg0
, 0))),
9241 fold (build1 (REALPART_EXPR
, type
,
9242 TREE_OPERAND (arg0
, 1)))));
9246 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
9247 return fold_convert (type
, integer_zero_node
);
9248 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
9249 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
9250 TREE_OPERAND (arg0
, 0));
9251 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
9252 return TREE_IMAGPART (arg0
);
9253 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9254 return fold (build2 (TREE_CODE (arg0
), type
,
9255 fold (build1 (IMAGPART_EXPR
, type
,
9256 TREE_OPERAND (arg0
, 0))),
9257 fold (build1 (IMAGPART_EXPR
, type
,
9258 TREE_OPERAND (arg0
, 1)))));
9262 /* Check for a built-in function. */
9263 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
9264 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
9266 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
9268 tree tmp
= fold_builtin (t
, false);
9276 } /* switch (code) */
9279 #ifdef ENABLE_FOLD_CHECKING
9282 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
9283 static void fold_check_failed (tree
, tree
);
9284 void print_fold_checksum (tree
);
9286 /* When --enable-checking=fold, compute a digest of expr before
9287 and after actual fold call to see if fold did not accidentally
9288 change original expr. */
9295 unsigned char checksum_before
[16], checksum_after
[16];
9298 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
9299 md5_init_ctx (&ctx
);
9300 fold_checksum_tree (expr
, &ctx
, ht
);
9301 md5_finish_ctx (&ctx
, checksum_before
);
9304 ret
= fold_1 (expr
);
9306 md5_init_ctx (&ctx
);
9307 fold_checksum_tree (expr
, &ctx
, ht
);
9308 md5_finish_ctx (&ctx
, checksum_after
);
9311 if (memcmp (checksum_before
, checksum_after
, 16))
9312 fold_check_failed (expr
, ret
);
9318 print_fold_checksum (tree expr
)
9321 unsigned char checksum
[16], cnt
;
9324 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
9325 md5_init_ctx (&ctx
);
9326 fold_checksum_tree (expr
, &ctx
, ht
);
9327 md5_finish_ctx (&ctx
, checksum
);
9329 for (cnt
= 0; cnt
< 16; ++cnt
)
9330 fprintf (stderr
, "%02x", checksum
[cnt
]);
9331 putc ('\n', stderr
);
9335 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
9337 internal_error ("fold check: original tree changed by fold");
9341 fold_checksum_tree (tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
9344 enum tree_code code
;
9345 char buf
[sizeof (struct tree_decl
)];
9348 gcc_assert ((sizeof (struct tree_exp
) + 5 * sizeof (tree
)
9349 <= sizeof (struct tree_decl
))
9350 && sizeof (struct tree_type
) <= sizeof (struct tree_decl
));
9353 slot
= htab_find_slot (ht
, expr
, INSERT
);
9357 code
= TREE_CODE (expr
);
9358 if (TREE_CODE_CLASS (code
) == tcc_declaration
9359 && DECL_ASSEMBLER_NAME_SET_P (expr
))
9361 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9362 memcpy (buf
, expr
, tree_size (expr
));
9364 SET_DECL_ASSEMBLER_NAME (expr
, NULL
);
9366 else if (TREE_CODE_CLASS (code
) == tcc_type
9367 && (TYPE_POINTER_TO (expr
) || TYPE_REFERENCE_TO (expr
)
9368 || TYPE_CACHED_VALUES_P (expr
)))
9370 /* Allow these fields to be modified. */
9371 memcpy (buf
, expr
, tree_size (expr
));
9373 TYPE_POINTER_TO (expr
) = NULL
;
9374 TYPE_REFERENCE_TO (expr
) = NULL
;
9375 TYPE_CACHED_VALUES_P (expr
) = 0;
9376 TYPE_CACHED_VALUES (expr
) = NULL
;
9378 md5_process_bytes (expr
, tree_size (expr
), ctx
);
9379 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
9380 if (TREE_CODE_CLASS (code
) != tcc_type
9381 && TREE_CODE_CLASS (code
) != tcc_declaration
)
9382 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
9383 switch (TREE_CODE_CLASS (code
))
9389 md5_process_bytes (TREE_STRING_POINTER (expr
),
9390 TREE_STRING_LENGTH (expr
), ctx
);
9393 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
9394 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
9397 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
9403 case tcc_exceptional
:
9407 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
9408 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
9411 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
9412 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
9418 case tcc_expression
:
9420 case tcc_comparison
:
9424 len
= TREE_CODE_LENGTH (code
);
9425 for (i
= 0; i
< len
; ++i
)
9426 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
9428 case tcc_declaration
:
9429 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
9430 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
9431 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
9432 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
9433 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
9434 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
9435 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
9436 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
9437 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
9438 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
9439 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
9442 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
9443 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
9444 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
9445 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
9446 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
9447 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
9448 if (INTEGRAL_TYPE_P (expr
)
9449 || SCALAR_FLOAT_TYPE_P (expr
))
9451 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
9452 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
9454 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
9455 if (TREE_CODE (expr
) == RECORD_TYPE
9456 || TREE_CODE (expr
) == UNION_TYPE
9457 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
9458 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
9459 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
9468 /* Perform constant folding and related simplification of initializer
9469 expression EXPR. This behaves identically to "fold" but ignores
9470 potential run-time traps and exceptions that fold must preserve. */
9473 fold_initializer (tree expr
)
9475 int saved_signaling_nans
= flag_signaling_nans
;
9476 int saved_trapping_math
= flag_trapping_math
;
9477 int saved_trapv
= flag_trapv
;
9480 flag_signaling_nans
= 0;
9481 flag_trapping_math
= 0;
9484 result
= fold (expr
);
9486 flag_signaling_nans
= saved_signaling_nans
;
9487 flag_trapping_math
= saved_trapping_math
;
9488 flag_trapv
= saved_trapv
;
9493 /* Determine if first argument is a multiple of second argument. Return 0 if
9494 it is not, or we cannot easily determined it to be.
9496 An example of the sort of thing we care about (at this point; this routine
9497 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9498 fold cases do now) is discovering that
9500 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9506 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9508 This code also handles discovering that
9510 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9512 is a multiple of 8 so we don't have to worry about dealing with a
9515 Note that we *look* inside a SAVE_EXPR only to determine how it was
9516 calculated; it is not safe for fold to do much of anything else with the
9517 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9518 at run time. For example, the latter example above *cannot* be implemented
9519 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9520 evaluation time of the original SAVE_EXPR is not necessarily the same at
9521 the time the new expression is evaluated. The only optimization of this
9522 sort that would be valid is changing
9524 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9528 SAVE_EXPR (I) * SAVE_EXPR (J)
9530 (where the same SAVE_EXPR (J) is used in the original and the
9531 transformed version). */
9534 multiple_of_p (tree type
, tree top
, tree bottom
)
9536 if (operand_equal_p (top
, bottom
, 0))
9539 if (TREE_CODE (type
) != INTEGER_TYPE
)
9542 switch (TREE_CODE (top
))
9545 /* Bitwise and provides a power of two multiple. If the mask is
9546 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
9547 if (!integer_pow2p (bottom
))
9552 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
9553 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
9557 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
9558 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
9561 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
9565 op1
= TREE_OPERAND (top
, 1);
9566 /* const_binop may not detect overflow correctly,
9567 so check for it explicitly here. */
9568 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
9569 > TREE_INT_CST_LOW (op1
)
9570 && TREE_INT_CST_HIGH (op1
) == 0
9571 && 0 != (t1
= fold_convert (type
,
9572 const_binop (LSHIFT_EXPR
,
9575 && ! TREE_OVERFLOW (t1
))
9576 return multiple_of_p (type
, t1
, bottom
);
9581 /* Can't handle conversions from non-integral or wider integral type. */
9582 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
9583 || (TYPE_PRECISION (type
)
9584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
9587 /* .. fall through ... */
9590 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
9593 if (TREE_CODE (bottom
) != INTEGER_CST
9594 || (TYPE_UNSIGNED (type
)
9595 && (tree_int_cst_sgn (top
) < 0
9596 || tree_int_cst_sgn (bottom
) < 0)))
9598 return integer_zerop (const_binop (TRUNC_MOD_EXPR
,
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their combined bit count is smaller than that
         of the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }
    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_SQRT)
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
            }
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
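/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   the "at least 2 bits shorter" rule for PLUS_EXPR above, in host
   arithmetic.  Two zero-extended 8-bit values sum to at most 510 < 2^9,
   so a 32-bit result can never have its sign bit set.  The function name
   is hypothetical.  */
#if 0
static int
example_zero_extend_sum (unsigned char x, unsigned char y)
{
  int sum = (int) x + (int) y;   /* 0 <= sum <= 510 */
  return sum >= 0;               /* always 1 */
}
#endif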
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.  */
bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
         if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* With the presence of negative values it is hard
             to say something.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              || tree_expr_nonzero_p (TREE_OPERAND (t, 0)));

    default:
      break;
    }

  return false;
}
/* See if we are applying CODE, a relational operator, to the highest or
   lowest possible integer of TYPE.  If so, then the result is a compile
   time constant and we can optimize accordingly.  */
static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
                       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
          || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        {
          max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
          min = 0;
        }
      else
        {
          max = signed_max;
          min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
        }

      if (TREE_INT_CST_HIGH (op1) == 0
          && TREE_INT_CST_LOW (op1) == max)
        switch (code)
          {
          case GT_EXPR:
            return omit_one_operand (type, integer_zero_node, op0);

          case GE_EXPR:
            *code_p = EQ_EXPR;
            break;

          case LE_EXPR:
            return omit_one_operand (type, integer_one_node, op0);

          case LT_EXPR:
            *code_p = NE_EXPR;
            break;

          /* The GE_EXPR and LT_EXPR cases above are not normally
             reached because of previous transformations.  */

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == 0
               && TREE_INT_CST_LOW (op1) == max - 1)
        switch (code)
          {
          case GT_EXPR:
            *code_p = EQ_EXPR;
            *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
            break;

          case LE_EXPR:
            *code_p = NE_EXPR;
            *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
            break;

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
               && TREE_INT_CST_LOW (op1) == min)
        switch (code)
          {
          case LT_EXPR:
            return omit_one_operand (type, integer_zero_node, op0);

          case LE_EXPR:
            *code_p = EQ_EXPR;
            break;

          case GE_EXPR:
            return omit_one_operand (type, integer_one_node, op0);

          case GT_EXPR:
            *code_p = NE_EXPR;
            break;

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
               && TREE_INT_CST_LOW (op1) == min + 1)
        switch (code)
          {
          case GE_EXPR:
            *code_p = NE_EXPR;
            *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
            break;

          case LT_EXPR:
            *code_p = EQ_EXPR;
            *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
            break;

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == 0
               && TREE_INT_CST_LOW (op1) == signed_max
               && TYPE_UNSIGNED (TREE_TYPE (op1))
               /* signed_type does not work on pointer types.  */
               && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        {
          /* The following case also applies to X < signed_max+1
             and X >= signed_max+1 because of previous transformations.  */
          if (code == LE_EXPR || code == GT_EXPR)
            {
              tree st0, st1, exp, retval;
              st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
              st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

              exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                            type,
                            fold_convert (st0, op0),
                            fold_convert (st1, integer_zero_node));

              retval = fold_binary_to_constant (TREE_CODE (exp),
                                                TREE_TYPE (exp),
                                                TREE_OPERAND (exp, 0),
                                                TREE_OPERAND (exp, 1));

              /* If we are in gimple form, then returning EXP would create
                 non-gimple expressions.  Clearing it is safe and ensures
                 we do not allow a non-gimple expression to escape.  */
              if (in_gimple_form)
                exp = NULL;

              return (retval ? retval : exp);
            }
        }
    }

  return NULL_TREE;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when at least one operand is a constant.
   As a result of those simplifying assumptions this routine is far
   simpler than the generic fold routine.  */
tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem, subop0, subop1;
  int wins = 1;

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  /* If either operand is a complex type, extract its real component.  */
  if (TREE_CODE (op0) == COMPLEX_CST)
    subop0 = TREE_REALPART (op0);
  else
    subop0 = op0;

  if (TREE_CODE (op1) == COMPLEX_CST)
    subop1 = TREE_REALPART (op1);
  else
    subop1 = op1;

  /* Note if either argument is not a real or integer constant.
     With a few exceptions, simplification is limited to cases
     where both arguments are constants.  */
  if ((TREE_CODE (subop0) != INTEGER_CST
       && TREE_CODE (subop0) != REAL_CST)
      || (TREE_CODE (subop1) != INTEGER_CST
          && TREE_CODE (subop1) != REAL_CST))
    wins = 0;

  switch (code)
    {
    case PLUS_EXPR:
      /* (plus (address) (const_int)) is a constant.  */
      if (TREE_CODE (op0) == PLUS_EXPR
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
              || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
                  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
                      == ADDR_EXPR)))
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
                         const_binop (PLUS_EXPR, op1,
                                      TREE_OPERAND (op0, 1), 0));
        }
      /* FALLTHRU */
    case BIT_XOR_EXPR:

    binary:
      if (!wins)
        return NULL_TREE;

      /* Both arguments are constants.  Simplify.  */
      tem = const_binop (code, op0, op1, 0);
      if (tem != NULL_TREE)
        {
          /* The return value should always have the same type as
             the original expression.  */
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);

          return tem;
        }
      return NULL_TREE;

    case MINUS_EXPR:
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an
         operand is volatile.  */
      if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
        return fold_convert (type, integer_zero_node);

      goto binary;

    case MULT_EXPR:
    case BIT_AND_EXPR:
      /* Special case multiplication or bitwise AND where one argument
         is zero.  */
      if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
          && real_zerop (op1))
        return omit_one_operand (type, op1, op0);

      goto binary;

    case BIT_IOR_EXPR:
      /* Special case when we know the result will be all ones.  */
      if (integer_all_onesp (op1))
        return omit_one_operand (type, op1, op0);

      goto binary;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by zero is undefined.  */
      if (integer_zerop (op1))
        return NULL_TREE;

      if (TREE_CODE (op1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
          && real_zerop (op1))
        return NULL_TREE;

      goto binary;

    case MIN_EXPR:
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, op1, op0);

      goto binary;

    case MAX_EXPR:
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, op1, op0);

      goto binary;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
        return omit_one_operand (type, op0, op1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);

      /* Since negative shift count is not well-defined, don't
         try to compute it in the compiler.  */
      if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
        return NULL_TREE;

      goto binary;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* -1 rotated either direction by any amount is still -1.  */
      if (integer_all_onesp (op0))
        return omit_one_operand (type, op0, op1);

      /* 0 rotated either direction by any amount is still zero.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);

      goto binary;

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, op0, op1);
      return NULL_TREE;
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (op0) == INTEGER_CST
           && TREE_CODE (op1) != INTEGER_CST)
          || (TREE_CODE (op0) == REAL_CST
              && TREE_CODE (op1) != REAL_CST))
        {
          tree temp;

          temp = op0;
          op0 = op1;
          op1 = temp;
          code = swap_tree_comparison (code);
        }

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (op0) != INTEGER_CST
          && tree_int_cst_sgn (op1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              code = GT_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            default:
              break;
            }
        }

      tem = fold_relational_hi_lo (&code, type, &op0, &op1);
      if (tem)
        return tem;

      /* Fall through.  */

    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (!wins)
        return NULL_TREE;

      return fold_relational_const (code, type, op0, op1);

    case RANGE_EXPR:
      /* This could probably be handled.  */
      return NULL_TREE;

    case TRUTH_AND_EXPR:
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (true, type);
      return NULL_TREE;

    case TRUTH_OR_EXPR:
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (false, type);
      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        {
          int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
          return constant_boolean_node (x, type);
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
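/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   the X >= C --> X > (C - 1) canonicalization above in host arithmetic.
   When C > 0, subtracting one cannot underflow, so both comparisons
   have the same value.  The function name is hypothetical.  */
#if 0
static int
example_ge_to_gt (int x, int c)
{
  if (c > 0)
    return (x >= c) == (x > c - 1);   /* always 1 */
  return 1;                           /* transformation not applied */
}
#endif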
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */
tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      if (TREE_CODE (op0) == COMPLEX_CST)
        subop = TREE_REALPART (op0);
      else
        subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
        return NULL_TREE;
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      return fold_convert_const (code, type, op0);

    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_negate_const (op0, type);
      else
        return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_abs_const (op0, type);
      else
        return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST)
        return fold_not_const (op0, type);
      else
        return NULL_TREE;

    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_REALPART (op0);
      else
        return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_IMAGPART (op0);
      else
        return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST
          && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
        return build_complex (type, TREE_REALPART (op0),
                              negate_expr (TREE_IMAGPART (op0)));
      else
        return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
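/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   the two-word negation that neg_double performs, shown on a pair of
   32-bit halves.  Negation is complement plus one, so the low word is
   negated directly and the high word absorbs the carry, which is
   nonzero only when the low word was zero.  The helper name is
   hypothetical.  */
#if 0
static void
example_neg_double (unsigned int low, unsigned int high,
                    unsigned int *plow, unsigned int *phigh)
{
  *plow = -low;
  *phigh = ~high + (low == 0);
}
#endif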
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
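/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   why fold_abs_const must track overflow -- negating the most negative
   two's complement value wraps back to itself, e.g. for 32-bit int.
   The function name is hypothetical.  */
#if 0
#include <limits.h>
static int
example_abs_overflow (void)
{
  unsigned int m = (unsigned int) INT_MIN;
  return -m == m;   /* 1: |INT_MIN| is not representable in int */
}
#endif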
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}
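/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   the *(foo *)&fooarray => fooarray[0] case above at the source level.
   The function name is hypothetical.  */
#if 0
static int
example_indirect_ref (void)
{
  int fooarray[4] = { 7, 8, 9, 10 };
  return *(int *) &fooarray == fooarray[0];   /* 1 */
}
#endif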
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
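/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   the power-of-two branch of round_up in host arithmetic -- add
   divisor - 1, then clear the low bits with the mask -divisor.  The
   function name is hypothetical.  */
#if 0
static unsigned long
example_round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* divisor must be a power of two, as checked above.  */
  return (value + divisor - 1) & -divisor;
}
#endif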
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
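/* A minimal illustrative sketch, not part of GCC and kept under "#if 0":
   the power-of-two branch of round_down -- clearing the low bits with
   the mask -divisor truncates to the next lower multiple.  The function
   name is hypothetical.  */
#if 0
static unsigned long
example_round_down_pow2 (unsigned long value, unsigned long divisor)
{
  return value & -divisor;   /* divisor is a power of two */
}
#endif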
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);

      if (TREE_CODE (core) == INDIRECT_REF)
        core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
        return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}