/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
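/* In this encoding (enumerator values reconstructed to be consistent
   with compcode_to_comparison below, so treat them as assumed): bit 0
   stands for "less than", bit 1 for "equal", bit 2 for "greater than",
   and bit 3 for "unordered".  Thus COMPCODE_LE == COMPCODE_LT |
   COMPCODE_EQ, and COMPCODE_NE covers LT, GT and UNORD.  Combining two
   comparisons on the same operands then reduces to a bitwise AND or OR
   of their codes, which is what combine_comparisons exploits.  */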
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
		    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
				   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
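/* For illustration (values assumed, not from the original source): in
   8-bit two's complement, 100 + 100 wraps to -56.  Here ~(a ^ b) has
   the sign bit set because A and B agree in sign, and (a ^ sum) has it
   set because A and SUM disagree in sign, so the macro yields nonzero.
   For 100 + (-100) == 0 the signs of A and B differ, ~(a ^ b) clears
   the sign bit, and no overflow is reported.  */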
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
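/* A worked example (assuming HOST_BITS_PER_WIDE_INT == 64, so BASE is
   2^32): for x == 0x123456789ABCDEF0,
     LOWPART (x)  == 0x9ABCDEF0
     HIGHPART (x) == 0x12345678
   and x == LOWPART (x) + HIGHPART (x) * BASE.  Every half-word "digit"
   is less than BASE, so the product of two digits always fits in a
   HOST_WIDE_INT, which is what makes the digit-by-digit multiplication
   and division below safe.  */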
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
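/* Note that decode is the exact inverse of encode: each words[i] is a
   half-word digit below BASE, so for any (LOW, HI) pair

     encode (w, low, hi);
     decode (w, &low2, &hi2);

   yields low2 == low and hi2 == hi.  (Illustrative sketch; w, low2 and
   hi2 are hypothetical locals, not names from this file.)  */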
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if,
	CONST_OVERFLOWED is nonzero,
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
static tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT) 1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
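/* As an illustration (values assumed): forcing the INTEGER_CST 200 to
   fit a signed 8-bit type first clears the bits above bit 7, leaving
   0xC8, then sign-extends because bit 7 is set, producing -56 -- the
   value 200 reduced modulo 2^8 and reinterpreted as signed.  */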
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
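/* The carry out of the low word is detected by the unsigned
   wrap-around test (l < l1): for example (values assumed), if L1 is
   all ones and L2 is 1, L wraps to zero, the test holds, and 1 is
   carried into the high word.  The return value applies
   OVERFLOW_SUM_SIGN to the high halves to report signed doubleword
   overflow.  */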
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
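/* This is ordinary schoolbook multiplication in base BASE: each of the
   4 half-word digits of ARG1 is multiplied by each digit of ARG2 and
   accumulated into the 8-digit PROD, exactly as one would multiply two
   4-digit decimal numbers by hand.  The 0xFFFE0001 bound in the inner
   loop assumes 32-bit HOST_WIDE_INT (16-bit digits); with 64-bit
   HOST_WIDE_INT the analogous bounds hold for 32-bit digits.  */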
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
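/* Both rotates are built from the two shift primitives: rotating left
   by COUNT within PREC bits is (x << COUNT) | (x >> (PREC - COUNT))
   using logical shifts.  For example (values assumed), with PREC == 8,
   rotating 0xB1 (1011 0001) left by 3 gives 0x8D (1000 1101).  */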
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
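/* The rounding modes differ only in how the trial quotient/remainder
   pair is adjusted; num == quo * den + rem always holds afterwards.
   For example (values assumed):

	code			7 / 2		-7 / 2
	TRUNC_DIV_EXPR		3 rem  1	-3 rem -1
	FLOOR_DIV_EXPR		3 rem  1	-4 rem  1
	CEIL_DIV_EXPR		4 rem -1	-3 rem -1
	ROUND_DIV_EXPR		4 rem -1	-4 rem  1  */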
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
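/* The RSHIFT_EXPR case relies on this identity (assuming a 32-bit int
   with arithmetic right shift): (int) x >> 31 is 0 or -1, so its
   negation is 0 or 1, which is exactly (unsigned) x >> 31.  The same
   holds for any shift by TYPE_PRECISION - 1.  */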
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 0)));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 1)));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (t),
					   TREE_OPERAND (t, 1),
					   TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 0),
					       negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       negate_expr (tem),
					       TREE_OPERAND (t, 1))));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
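/* For example (a sketch, not from the original source): splitting
   "x + 5" with CODE == PLUS_EXPR yields *LITP == 5 and returns x;
   splitting "x - 5" yields *MINUS_LITP == 5 and returns x, because a
   subtracted literal is tracked separately rather than negated.  */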
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
		       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
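/* Usage sketch (a hypothetical call, not taken from this file):

     tree five = int_const_binop (PLUS_EXPR,
				  build_int_cst (integer_type_node, 2),
				  build_int_cst (integer_type_node, 3),
				  0);

   yields an INTEGER_CST of value 5, with any overflow flags propagated
   from the operands via force_fit_type.  */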
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */

      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return 0;
}
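/* The complex arithmetic above is just the textbook formulas: for
   multiplication,
     (r1 + i1*I) * (r2 + i2*I) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*I,
   and for division, numerator and denominator are multiplied by the
   conjugate of the divisor, giving
     ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2),
   where MAGSQUARED is the r2*r2 + i2*i2 term.  */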
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversion of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversion of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold (build1 (NOP_EXPR, type, arg));

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold (build1 (FLOAT_EXPR, type, arg));

	case REAL_TYPE:
	  return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			       type, arg));

	case COMPLEX_TYPE:
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	      }

	    arg = save_expr (arg);
	    rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	    ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold (build1 (NOP_EXPR, type, arg));

    case VOID_TYPE:
      return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));

    default:
      gcc_unreachable ();
    }
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_RANGE_REF:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return x;
    }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case GE_EXPR:
      return COMPCODE_GE;
    case NE_EXPR:
      return COMPCODE_NE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
			 truth_type, ll_arg, lr_arg));
}
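/* Worked example (values assumed): combining (x < y) && (x == y) ANDs
   COMPCODE_LT (1) with COMPCODE_EQ (2), giving COMPCODE_FALSE (0), so
   the whole expression folds to false; (x < y) || (x == y) ORs them to
   COMPCODE_LE (3) and folds to x <= y.  */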
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0 == 0.0, and
   (2) two NaNs may be indistinguishable, but NaN != NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't have side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPRs, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.  */
          return (OP_SAME (0)
                  && OP_SAME (1)
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  */
          return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly cannot be equal.  */
          if (! OP_SAME (0))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
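
/* Illustrative sketch (an addition, not from the original sources):
   the OEP_ONLY_CONST distinction matters mostly for floating point.
   With REAL_CST operands, operand_equal_p tests indistinguishability
   via REAL_VALUES_IDENTICAL rather than C equality, so for
   hypothetical trees built from the constants below one would expect:

        operand_equal_p (minus_zero_cst, plus_zero_cst, OEP_ONLY_CONST) == 0
          (they compare equal with ==, but are distinguishable)
        operand_equal_p (var_x, var_x, 0) == 1
          (a VAR_DECL with no side effects equals itself)  */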
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case tcc_binary:
      return fold (build2 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1),
                           eval_subst (TREE_OPERAND (arg, 1),
                                       old0, new0, old1, new1)));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build3 (code, type,
                               eval_subst (TREE_OPERAND (arg, 0),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 1),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 2),
                                           old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build2 (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
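
/* Illustrative sketch (an addition, not from the original sources):
   folding "f () * 0" cannot simply drop the call, so omit_one_operand
   wraps the discarded operand in a COMPOUND_EXPR to preserve its side
   effects:

        omit_one_operand (type, integer_zero_node, call)
          -> (call, 0)    if TREE_SIDE_EFFECTS (call)
          -> 0            otherwise  */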
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* Fall through.  */
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
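
/* Illustrative sketch (an addition, not from the original sources):
   for floating-point operands with NaNs honored, "a < b" cannot simply
   be inverted to "a >= b" (both are false when either operand is NaN),
   so with -ftrapping-math the function wraps a TRUTH_NOT_EXPR instead:

        invert_truthvalue (a < b)   ->  !(a < b)   (FP, trapping math)
        invert_truthvalue (i < j)   ->  i >= j     (integers)
        invert_truthvalue (p && q)  ->  !p || !q   (De Morgan)  */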
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build2 (TREE_CODE (arg0), type, common,
                       fold (build2 (code, type, left, right))));
}
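
/* Illustrative sketch (an addition, not from the original sources):
   with B and C constants the distribution pays off twice, since the
   inner operation folds away:

        (a | 0x0f) & (a | 0xf0)  ->  a | (0x0f & 0xf0)  ->  a | 0  ->  a
        (a & b) | (a & c)        ->  a & (b | c)  */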
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
                   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
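
/* Illustrative sketch (an addition, not from the original sources):
   given a hypothetical "struct s { unsigned f : 3; } x;", a test such
   as "x.f == 5" extracts the word containing the field, masks it, and
   compares against the constant shifted into place, avoiding the shift
   a plain bitfield read would need:

        x.f == 5   ->   (WORD (x) & MASK) == ((5 << BITPOS) & MASK)

   where WORD, MASK and BITPOS stand for the nbitsize/nbitpos reference
   and the mask computed above.  */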
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
                         fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
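
/* Illustrative sketch (an addition, not from the original sources):
   for a 32-bit int EXP, sign_bit_p returns EXP when VAL is 0x80000000,
   which lets callers (e.g. fold) rewrite sign-bit tests:

        (x & 0x80000000) != 0   ->   x < 0

   For an extension such as (int) c with an 8-bit char c, VAL is also
   tested against 0x80, the sign bit of the unextended type.  */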
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
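
/* Illustrative sketch (an addition, not from the original sources):
   in this notation the conversion above proceeds roughly as

        X >= 2   ->  + [2, -]
        X <= 5   ->  + [-, 5]
        AND      ->  + [2, 5]
        emit     ->  (unsigned) (X - 2) <= 3

   i.e. merge_ranges combines the two half-ranges and build_range_check
   turns the result back into a single unsigned comparison.  */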
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                          arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_CODE_LENGTH (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  fold_convert (arg0_type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
                                              fold_convert (arg0_type,
                                                            high_positive),
                                              fold_convert (arg0_type,
                                                            integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
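
/* Illustrative sketch (an addition, not from the original sources):
   for integer comparisons the first pass through the loop records the
   bounds, complementing IN_P for the "in" cases:

        make_range (x == 2)   ->  returns x, IN_P = 1, range [2, 2]
        make_range (x > 10)   ->  returns x, IN_P = 0, range [-, 10]

   i.e. "x > 10" is represented as being outside the range from the
   type's minimum up to 10.  */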
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build2 (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build2 (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build2 (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build2 (GT_EXPR, type, exp,
                               fold_convert (etype, integer_zero_node)));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build2 (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
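
/* Illustrative sketch (an addition, not from the original sources):
   for the range + [2, 5] on a signed int X, the bounds differ, neither
   is missing, and LOW is nonzero, so the general case at the end
   applies:

        build_range_check (type, X, 1, 2, 5)
          -> build_range_check (type, X - 2, 1, 0, 3)
          -> (unsigned) (X - 2) <= 3  */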
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
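
/* Illustrative sketch (an addition, not from the original sources):
   merging two included ranges (plain numbers stand in for the
   INTEGER_CST trees actually passed):

        merge_ranges (&in, &lo, &hi, 1, 2, 5, 1, 4, 9)
          -> in = 1, range [4, 5]     (the overlap)
        merge_ranges (&in, &lo, &hi, 1, 2, 3, 1, 4, 5)
          -> in = 0, range [-, -]     (disjoint: always false)  */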
4148 /* Subroutine of fold, looking inside expressions of the form
4149 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4150 of the COND_EXPR. This function is being used also to optimize
4151 A op B ? C : A, by reversing the comparison first.
4153 Return a folded expression whose code is not a COND_EXPR
4154 anymore, or NULL_TREE if no folding opportunity is found. */
4157 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4159 enum tree_code comp_code
= TREE_CODE (arg0
);
4160 tree arg00
= TREE_OPERAND (arg0
, 0);
4161 tree arg01
= TREE_OPERAND (arg0
, 1);
4162 tree arg1_type
= TREE_TYPE (arg1
);
4168 /* If we have A op 0 ? A : -A, consider applying the following
4171 A == 0? A : -A same as -A
4172 A != 0? A : -A same as A
4173 A >= 0? A : -A same as abs (A)
4174 A > 0? A : -A same as abs (A)
4175 A <= 0? A : -A same as -abs (A)
4176 A < 0? A : -A same as -abs (A)
4178 None of these transformations work for modes with signed
4179 zeros. If A is +/-0, the first two transformations will
4180 change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */

  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && TREE_CODE (arg2) == NEGATE_EXPR
      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert (arg1_type, arg1);
        return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
        return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
        return negate_expr (fold_convert (type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
        return fold_convert (type, integer_zero_node);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A >  B? A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A <  B? A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
                    : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
                    : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert (type, arg01);
        return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
                                                    type, arg1, arg2)));
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
                                                    type, arg1, arg2)));
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
                                                    type, arg1, arg2)));
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
                                                    type, arg1, arg2)));
        break;

      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
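/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  It shows, as plain C, the "A op C1 ? A : C2" shapes the
   switch above turns back into MIN_EXPR/MAX_EXPR.  The function names
   are hypothetical.  */
#if 0
static int clamp_below (int x)
{
  return x < 6 ? x : 5;         /* C1 == C2 + 1: folds to MIN (x, 5).  */
}

static int clamp_above (int x)
{
  return x > 4 ? x : 5;         /* C1 == C2 - 1: folds to MAX (x, 5).  */
}
#endif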
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2 (code == TRUTH_ANDIF_EXPR
                       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build2 (code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, lhs, rhs);
        }
    }

  return 0;
}
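/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The two hypothetical functions below are equivalent; the
   second is what merging the pair of comparisons into one range test
   amounts to at the source level.  */
#if 0
static int is_digit (int ch)
{
  return ch >= '0' && ch <= '9';        /* two short-circuited tests */
}

static int is_digit_as_range_test (int ch)
{
  return (unsigned) (ch - '0') <= 9u;   /* one unsigned range check */
}
#endif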
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
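/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  unextend works on tree constants, but the underlying idiom
   is ordinary sign extension of a P-bit field: left-justify the field
   in a signed word, then shift it back arithmetically.  This sketch
   assumes 32-bit int and arithmetic right shifts of negative values,
   as GCC itself relies on.  */
#if 0
static int sign_extend_field (unsigned value, int p)
{
  int shift = 32 - p;                      /* bits above the field */
  return (int) (value << shift) >> shift;  /* arithmetic right shift */
}
#endif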
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    fold_convert (TREE_TYPE (lhs), integer_zero_node));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    fold_convert (TREE_TYPE (rhs), integer_zero_node));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (NE_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (EQ_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
        return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, ll_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, rl_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert (rntype, lhs);
                  ll_mask = fold_convert (rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert (lntype, rhs);
                  lr_mask = fold_convert (lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning ("%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning ("%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
                 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
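/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  It shows the effect of fold_truthop on two constant
   bit-field tests.  The struct and the constants 0xff/0x42 are
   hypothetical and depend on the target's bit-field layout; the point
   is that one load, one mask and one compare replace two of each.  */
#if 0
struct s { unsigned a : 4; unsigned b : 4; };

static int two_compares (struct s *p)
{
  return p->a == 2 && p->b == 4;        /* before: two masked compares */
}

static int one_compare (unsigned char byte)   /* byte holding both fields */
{
  return (byte & 0xff) == 0x42;         /* after: 0x42 assumes "b" is the
                                           high nibble */
}
#endif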
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return build2 (code, type, op0, op1);

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        /* FIXME: We should be able to invert code without building a
           scratch tree node, but doing so would require us to
           duplicate a part of invert_truthvalue here.  */
        tree tem = invert_truthvalue (build2 (code, type, op0, op1));
        tem = optimize_minmax_comparison (TREE_CODE (tem),
                                          TREE_TYPE (tem),
                                          TREE_OPERAND (tem, 0),
                                          TREE_OPERAND (tem, 1));
        return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
        fold (build2 (TRUTH_ORIF_EXPR, type,
                      optimize_minmax_comparison
                      (EQ_EXPR, type, arg0, comp_const),
                      optimize_minmax_comparison
                      (GT_EXPR, type, arg0, comp_const)));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold (build2 (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5  */
        return fold (build2 (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold (build2 (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold (build2 (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold (build2 (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold (build2 (GT_EXPR, type, inner, comp_const));

    default:
      return build2 (code, type, op0, op1);
    }
}
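/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The EQ_EXPR and GT_EXPR identities above, spelled out on
   int with a hypothetical MAX macro.  */
#if 0
#define MAX(a, b) ((a) > (b) ? (a) : (b))

static int eq_zero (int x) { return MAX (x, 0) == 0; }   /* same as x <= 0 */
static int eq_five (int x) { return MAX (x, 0) == 5; }   /* same as x == 5 */
static int eq_neg (int x)  { return MAX (x, 0) == -1; }  /* always false */
static int gt_neg (int x)  { return MAX (x, 0) > -1; }   /* always true */
#endif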
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and is unsigned, and its type is smaller than ctype,
             then we cannot pass through as widening.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (GET_MODE_SIZE (TYPE_MODE (ctype))
                   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (GET_MODE_SIZE (TYPE_MODE (type))
                  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*lang_hooks.types.signed_type) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
            {
              t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* Fall through.  */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

          return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
                               fold_convert (ctype, t2)));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && ! TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype, fold_convert (ctype, op0), t1),
                               c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
                             fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && ! flag_wrapv))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold (build2 (tcode, ctype,
                             fold (build2 (code, ctype,
                                           fold_convert (ctype, op0),
                                           fold_convert (ctype, c))),
                             op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
                             fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
        return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
                             fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && ! TREE_OVERFLOW (t1))
        return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
                                 fold_convert (ctype,
                                               const_binop (TRUNC_DIV_EXPR,
                                                            op1, c, 0))));
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            return fold (build2 (code, ctype, fold_convert (ctype, op0),
                                 fold_convert (ctype,
                                               const_binop (TRUNC_DIV_EXPR,
                                                            c, op1, 0))));
        }
      break;

    default:
      break;
    }

  return 0;
}
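/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  These are the source-level identities from the comment
   before extract_muldiv, valid only when the intermediate computation
   cannot overflow (or overflow is undefined for the type), which is
   exactly what the checks above guard.  */
#if 0
static long fold_div (long x, long y)
{
  return (x * 8 + y * 16) / 4;          /* becomes x * 2 + y * 4 */
}

static long fold_mul (long x)
{
  return (x + 7) * 4;                   /* canonicalized to x * 4 + 28 */
}
#endif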
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* We have to be careful with stripping nops as with the
     base type the meaning of the offset can change.  */
  tree inner_expr = expr;
  STRIP_NOPS (inner_expr);
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          *base = TREE_OPERAND (expr, 0);
          *offset = TREE_OPERAND (expr, 1);
          return true;
        }
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (inner_expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (inner_expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
        {
          *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
          *offset = TREE_OPERAND (op0, 1);
        }
      else
        {
          *base = inner_expr;
          *offset = NULL_TREE;
        }
      return true;
    }

  return false;
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
                  : build2 (code, type, arg, true_value));
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
                  : build2 (code, type, arg, false_value));
    }

  test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
  return fold_convert (type, test);
}
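/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The rewrite above, at the source level: because ARG is
   constant, no SAVE_EXPR is needed and each branch may then fold
   further.  The function names are hypothetical.  */
#if 0
static int arg_second (int b, int x, int y)
{
  return 10 + (b ? x : y);              /* a + (b ? x : y) */
}

static int distributed (int b, int x, int y)
{
  return b ? 10 + x : 10 + y;           /* b ? (a + x) : (a + y) */
}
#endif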
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
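/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  It demonstrates the signed-zero case the predicate above
   must reject: under round-to-nearest, -0.0 + 0.0 is +0.0, so folding
   "x + 0.0" to "x" would lose the sign of a negative zero.  */
#if 0
#include <math.h>

static int addition_preserves_sign (void)
{
  double x = -0.0;
  return signbit (x + 0.0) == signbit (x);  /* 0: the fold is invalid */
}
#endif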
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all
             false if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold (build2 (GE_EXPR, type, arg,
                               build_real (TREE_TYPE (arg), dconst0)));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold (build2 (EQ_EXPR, type, arg,
                                     build_real (TREE_TYPE (arg), c2)));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold (build2 (code, type, arg,
                               build_real (TREE_TYPE (arg), c2)));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold (build2 (NE_EXPR, type, arg,
                                     build_real (TREE_TYPE (arg), c2)));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold (build2 (GE_EXPR, type, arg,
                                     build_real (TREE_TYPE (arg), dconst0)));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold (build2 (TRUTH_ANDIF_EXPR, type,
                                   fold (build2 (GE_EXPR, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             dconst0))),
                                   fold (build2 (NE_EXPR, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             c2)))));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold (build2 (code, type, arg,
                                 build_real (TREE_TYPE (arg), c2)));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold (build2 (TRUTH_ANDIF_EXPR, type,
                                   fold (build2 (GE_EXPR, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             dconst0))),
                                   fold (build2 (code, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             c2)))));
            }
        }
    }

  return NULL_TREE;
}
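/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The headline rewrite above: for a non-negative constant c,
   sqrt(x) > c becomes x > c*c, eliminating the call.  Both hypothetical
   functions agree even for x < 0, where sqrt yields NaN and both
   comparisons are false.  */
#if 0
#include <math.h>

static int with_sqrt (double x)    { return sqrt (x) > 2.0; }
static int without_sqrt (double x) { return x > 4.0; }  /* c*c */
#endif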
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold (build2 (EQ_EXPR, type, arg0, arg0));
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
                           arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
                           arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
                             arg0, build_real (TREE_TYPE (arg0), max)));

      /* The transformation below creates non-gimple code and thus is
         not appropriate if we are in gimple form.  */
      if (in_gimple_form)
        return NULL_TREE;

      temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
                           arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
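/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The LT_EXPR case above: "x < +Inf" and "x <= DBL_MAX"
   agree for every double, including NaN (both false) and +Inf (both
   false).  */
#if 0
#include <float.h>

static int lt_inf (double x)    { return x < __builtin_inf (); }
static int le_dblmax (double x) { return x <= DBL_MAX; }
#endif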
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
                         TREE_INT_CST_HIGH (arg01),
                         TREE_INT_CST_LOW (arg1),
                         TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
                             TREE_INT_CST_HIGH (prod),
                             TREE_INT_CST_LOW (tmp),
                             TREE_INT_CST_HIGH (tmp),
                             &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
                           TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
        return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
        return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
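/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The range check the code above computes for X/C1 op C2:
   with truncating division, x / 4 == 2 holds exactly for x in [8, 11],
   so the comparison becomes a single range test.  */
#if 0
static int div_compare (int x) { return x / 4 == 2; }
static int range_check (int x) { return x >= 8 && x <= 11; }  /* equal */
#endif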
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                               result_type, fold_convert (stype, arg00),
                               fold_convert (stype, integer_zero_node)));
        }

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
                              inner, integer_one_node));

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
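/* Illustrative sketch added for exposition; it is not part of the GCC
   sources.  The two shapes produced above: a sign-bit test becomes a
   signed comparison with zero, and any other single-bit test becomes
   shift-and-mask.  Assumes 32-bit unsigned.  */
#if 0
static int bit3_set (unsigned a)
{
  return (a & 8u) != 0;                 /* folds to (a >> 3) & 1 */
}

static int sign_bit_set (unsigned a)
{
  return (a & 0x80000000u) != 0;        /* folds to (int) a < 0 */
}
#endif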
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && TREE_CODE (shorter_type) == INTEGER_TYPE
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold (build (code, type, arg0_unw,
			fold_convert (shorter_type, arg1_unw)));

  if (TREE_CODE (arg1_unw) != INTEGER_CST)
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
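/* A sketch of the effect, assuming 16-bit short and 32-bit int:

     short s;
     (int) s == 12      folds to   s == 12    (12 fits in short)
     (int) s == 70000   folds to   0          (above the upper bound)
     (int) s != 70000   folds to   1

   where the constant results still evaluate s for its side effects,
   via omit_one_operand.  */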
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(TREE_CODE (arg1) == NOP_EXPR
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* If the signedness differs, only equality comparisons are safe.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold (build (code, type, arg0_inner, arg1));
}
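/* A sketch of the effect, assuming 32-bit int:

     int i;
     (unsigned) i == 5U   folds to   i == 5

   The cast is dropped because int and unsigned have the same
   precision and equality does not depend on signedness; the
   constant is refitted into the inner type by force_fit_type.  */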
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   the step of the array.  ADDR is the address, MULT the multiplicative
   expression.  If the function succeeds, the new address expression is
   returned.  Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
{
  tree s, delta, step;
  tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      s = arg0;
      delta = arg1;
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      s = arg1;
      delta = arg0;
    }
  else
    return NULL_TREE;

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  step = array_ref_element_size (ref);

	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  itype = TREE_TYPE (step);

	  /* If the type sizes do not match, we might run into problems
	     when one of them would overflow.  */
	  if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
	    continue;

	  if (!operand_equal_p (step, fold_convert (itype, s), 0))
	    continue;

	  delta = fold_convert (itype, delta);
	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
					TREE_OPERAND (pos, 1),
					delta));

  return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
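/* A sketch of the transformation, assuming 4-byte int so that the
   array step is 4 (the variable names are illustrative only):

     int a[100];
     &a[i] + 4 * d      becomes   &a[i + d]
     &a[i] - 4 * d      becomes   &a[i - d]

   The constant multiplier must match the element size exactly;
   otherwise NULL_TREE is returned and the expression is left alone.  */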
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold (build2 (MINUS_EXPR, typea, a1, a));
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold (build2 (GE_EXPR, type, a, y));
}
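/* A sketch of the rewrite, for unsigned a, x, y where the guard
   a < x is already known to hold:

     a < x && a + 1 > y    becomes    a < x && a >= y

   since a + 1 > y normally means a >= y && a != MAX, and the guard
   a < x rules out a == MAX.  */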
/* Fold complex addition when both components are accessible by parts.
   Return non-null if successful.  CODE should be PLUS_EXPR for addition,
   or MINUS_EXPR for subtraction.  */

static tree
fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
{
  tree ar, ai, br, bi, rr, ri, inner_type;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL;

  inner_type = TREE_TYPE (type);

  rr = fold (build2 (code, inner_type, ar, br));
  ri = fold (build2 (code, inner_type, ai, bi));

  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
}
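/* A sketch of the componentwise lowering, using C99 complex syntax
   only for illustration:

     (a + b*I) + (c + d*I)   becomes   (a + c) + (b + d)*I

   i.e. one PLUS_EXPR (or MINUS_EXPR) per component, each folded
   recursively, then recombined with COMPLEX_EXPR.  */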
/* Perform some simplifications of complex multiplication when one or more
   of the components are constants or zeros.  Return non-null if successful.  */

static tree
fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
{
  tree rr, ri, inner_type, zero;
  bool ar0, ai0, br0, bi0, bi1;

  inner_type = TREE_TYPE (type);
  zero = NULL;

  if (SCALAR_FLOAT_TYPE_P (inner_type))
    {
      ar0 = ai0 = br0 = bi0 = bi1 = false;

      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */

      if (TREE_CODE (ar) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
	ar0 = true, zero = ar;

      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
	ai0 = true, zero = ai;

      if (TREE_CODE (br) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
	br0 = true, zero = br;

      if (TREE_CODE (bi) == REAL_CST)
	{
	  if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
	    bi0 = true, zero = bi;
	  else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
	    bi1 = true;
	}
    }
  else
    {
      ar0 = integer_zerop (ar);
      if (ar0)
	zero = ar;
      ai0 = integer_zerop (ai);
      if (ai0)
	zero = ai;
      br0 = integer_zerop (br);
      if (br0)
	zero = br;
      bi0 = integer_zerop (bi);
      if (bi0)
	zero = bi;
      bi1 = integer_onep (bi);
    }

  /* We won't optimize anything below unless something is zero.  */
  if (zero == NULL)
    return NULL;

  if (ai0 && br0 && bi1)
    {
      rr = zero;
      ri = ar;
    }
  else if (ai0 && bi0)
    {
      rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
      ri = zero;
    }
  else if (ai0 && br0)
    {
      rr = zero;
      ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
    }
  else if (ar0 && bi0)
    {
      rr = zero;
      ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
    }
  else if (ar0 && br0)
    {
      rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
      rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
      ri = zero;
    }
  else if (bi0)
    {
      rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
      ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
    }
  else if (ai0)
    {
      rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
      ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
    }
  else if (br0)
    {
      rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
      rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
      ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
    }
  else if (ar0)
    {
      rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
      rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
      ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
    }
  else
    return NULL;

  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
}
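/* A sketch of the special cases, writing a complex value as r + i*I:

     (ar + 0*I) * (br + 0*I)   becomes   ar*br   + zero*I
     (0 + ai*I) * (br + 0*I)   becomes   zero    + (ai*br)*I
     (0 + ai*I) * (0 + bi*I)   becomes   -(ai*bi) + zero*I

   For floats only a literal +0.0 component qualifies, since folding
   away -0.0 or a NaN-producing term would change the result.  */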
static tree
fold_complex_mult (tree type, tree ac, tree bc)
{
  tree ar, ai, br, bi;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL;

  return fold_complex_mult_parts (type, ar, ai, br, bi);
}
/* Perform some simplifications of complex division when one or more of
   the components are constants or zeros.  Return non-null if successful.  */

static tree
fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
			enum tree_code code)
{
  tree rr, ri, inner_type, zero;
  bool ar0, ai0, br0, bi0, bi1;

  inner_type = TREE_TYPE (type);
  zero = NULL;

  if (SCALAR_FLOAT_TYPE_P (inner_type))
    {
      ar0 = ai0 = br0 = bi0 = bi1 = false;

      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */

      if (TREE_CODE (ar) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
	ar0 = true, zero = ar;

      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
	ai0 = true, zero = ai;

      if (TREE_CODE (br) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
	br0 = true, zero = br;

      if (TREE_CODE (bi) == REAL_CST)
	{
	  if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
	    bi0 = true, zero = bi;
	  else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
	    bi1 = true;
	}
    }
  else
    {
      ar0 = integer_zerop (ar);
      if (ar0)
	zero = ar;
      ai0 = integer_zerop (ai);
      if (ai0)
	zero = ai;
      br0 = integer_zerop (br);
      if (br0)
	zero = br;
      bi0 = integer_zerop (bi);
      if (bi0)
	zero = bi;
      bi1 = integer_onep (bi);
    }

  /* We won't optimize anything below unless something is zero.  */
  if (zero == NULL)
    return NULL;

  if (ai0 && bi0)
    {
      rr = fold (build2 (code, inner_type, ar, br));
      ri = zero;
    }
  else if (ai0 && br0)
    {
      rr = zero;
      ri = fold (build2 (code, inner_type, ar, bi));
      ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
    }
  else if (ar0 && bi0)
    {
      rr = zero;
      ri = fold (build2 (code, inner_type, ai, br));
    }
  else if (ar0 && br0)
    {
      rr = fold (build2 (code, inner_type, ai, bi));
      ri = zero;
    }
  else if (bi0)
    {
      rr = fold (build2 (code, inner_type, ar, br));
      ri = fold (build2 (code, inner_type, ai, br));
    }
  else if (br0)
    {
      rr = fold (build2 (code, inner_type, ai, bi));
      ri = fold (build2 (code, inner_type, ar, bi));
      ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
    }
  else
    return NULL;

  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
}
static tree
fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
{
  tree ar, ai, br, bi;

  if (TREE_CODE (ac) == COMPLEX_EXPR)
    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
  else if (TREE_CODE (ac) == COMPLEX_CST)
    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
  else
    return NULL;

  if (TREE_CODE (bc) == COMPLEX_EXPR)
    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
  else if (TREE_CODE (bc) == COMPLEX_CST)
    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
  else
    return NULL;

  return fold_complex_div_parts (type, ar, ai, br, bi, code);
}
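/* A sketch of the division special cases, writing values as r + i*I:

     (ar + ai*I) / (br + 0*I)   becomes   ar/br + (ai/br)*I
     (ar + 0*I)  / (0 + bi*I)   becomes   zero  - (ar/bi)*I

   Only divisors that are purely real or purely imaginary are
   simplified here; the general case returns NULL and is expanded
   later by the usual complex-division lowering.  */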
/* Fold a unary expression EXPR.  Return the folded expression if
   folding is successful.  Otherwise, return the original
   expression.  */

static tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operand.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold (build1 (code, type, arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold (build1 (code, type, arg02));
	  tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			      arg01, arg02));

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1 (code, type,
			  build3 (COND_EXPR,
				  TREE_TYPE (TREE_OPERAND
					     (TREE_OPERAND (tem, 1), 0)),
				  TREE_OPERAND (tem, 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold (build3 (COND_EXPR, type, arg0,
				 fold (build1 (code, type,
					       integer_one_node)),
				 fold (build1 (code, type,
					       integer_zero_node))));
	}
    }
  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
	  || TREE_CODE (op0) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold (build1 (code, type, TREE_OPERAND (op0, 0)));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type, TREE_OPERAND (op0, 0)));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold (build1 (code, type, TREE_OPERAND (op0, 0)));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = build1 (code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and = op0;
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
					TREE_INT_CST_HIGH (and1));
	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
				    TREE_CONSTANT_OVERFLOW (and1));
	      return fold (build2 (BIT_AND_EXPR, type,
				   fold_convert (type, and0), tem));
	    }
	}

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
	  && BINARY_CLASS_P (arg0)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = type;
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
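/* A sketch of the double-conversion rules above, assuming 16-bit
   short, 32-bit int and 64-bit long:

     (int) (long) i     becomes   i          (widening is a no-op here)
     (int) (short) i    is kept as written   (the truncation matters)
     (long) (int) s     becomes   (long) s   for short s

   i.e. the intermediate type may be dropped only when it cannot
   change any value on the way through.  */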
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return NULL_TREE;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return fold_convert (type, negate_expr (arg0));
      /* Convert - (~A) to A + 1.  */
      if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
			     build_int_cst (type, 1)));
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold (build1 (ABS_EXPR,
						     TREE_TYPE (targ0),
						     targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build2 (COMPLEX_EXPR, type,
		       TREE_OPERAND (arg0, 0),
		       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
			     build_int_cst (type, 1)));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
      return NULL_TREE;
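/* The BIT_NOT_EXPR identities above, in source form for a 32-bit
   int (two's complement arithmetic is assumed throughout fold):

     ~(-a)      becomes   a - 1
     ~(a - 1)   becomes   -a
     ~(a + -1)  becomes   -a
     ~(~a)      becomes   a                                        */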
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (REALPART_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (REALPART_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (IMAGPART_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (IMAGPART_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression EXPR.  Return the folded expression if
   folding is successful.  Otherwise, return the original
   expression.  */

static tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2);

  arg0 = op0;
  arg1 = op1;

  if (arg0)
    {
      tree subop;

      /* Strip any conversions that don't change the mode.  This is
	 safe for every expression, except for a comparison expression
	 because its signedness is derived from its operands.  So, in
	 the latter case, only strip conversions that don't change the
	 signedness.

	 Note that this is done as an internal manipulation within the
	 constant folder, in order to find the simplest representation
	 of the arguments so that their form can be studied.  In any
	 case, the appropriate type conversions should be put back in
	 the tree that will get out of the constant folder.  */
      if (kind == tcc_comparison)
	STRIP_SIGN_NOPS (arg0);
      else
	STRIP_NOPS (arg0);

      if (TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }

  if (arg1)
    {
      tree subop;

      /* Strip any conversions that don't change the mode.  This is
	 safe for every expression, except for a comparison expression
	 because its signedness is derived from its operands.  So, in
	 the latter case, only strip conversions that don't change the
	 signedness.

	 Note that this is done as an internal manipulation within the
	 constant folder, in order to find the simplest representation
	 of the arguments so that their form can be studied.  In any
	 case, the appropriate type conversions should be put back in
	 the tree that will get out of the constant folder.  */
      if (kind == tcc_comparison)
	STRIP_SIGN_NOPS (arg1);
      else
	STRIP_NOPS (arg1);

      if (TREE_CODE (arg1) == COMPLEX_CST)
	subop = TREE_REALPART (arg1);
      else
	subop = arg1;

      if (TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build2 (code, type, op1, op0));

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			  : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			  : TRUTH_XOR_EXPR,
			  type, fold_convert (boolean_type_node, arg0),
			  fold_convert (boolean_type_node, arg1)));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return tem;
    }
  if (TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		   fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == tcc_comparison
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		   fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == tcc_binary
	   || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold (build2 (code, type, TREE_OPERAND (arg0, 1),
				     arg1)));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold (build2 (code, type,
				     arg0, TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build2 (pcode, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, parg0),
						   fold_convert (type, marg))),
				     fold_convert (type, parg1)));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build2 (PLUS_EXPR, type,
				     fold_convert (type, parg0),
				     fold (build2 (pcode, type,
						   fold_convert (type, marg),
						   fold_convert (type,
								 parg1)))));
	    }

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build2 (MULT_EXPR, type, arg00,
					   build_int_cst (NULL_TREE,
							  int01 / int11)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build2 (MULT_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, alt0),
						   fold_convert (type, alt1))),
				     same));
	    }

	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
	     of the array.  The loop optimizer sometimes produces this kind
	     of expression.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
	      if (tem)
		return fold_convert (type, fold (tem));
	    }
	  else if (TREE_CODE (arg1) == ADDR_EXPR
		   && TREE_CODE (arg0) == MULT_EXPR)
	    {
	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
	      if (tem)
		return fold_convert (type, fold (tem));
	    }
	}
7355 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
7356 return non_lvalue (fold_convert (type
, arg0
));
7358 /* Likewise if the operands are reversed. */
7359 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
7360 return non_lvalue (fold_convert (type
, arg1
));
7362 /* Convert X + -C into X - C. */
7363 if (TREE_CODE (arg1
) == REAL_CST
7364 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
7366 tem
= fold_negate_const (arg1
, type
);
7367 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
7368 return fold (build2 (MINUS_EXPR
, type
,
7369 fold_convert (type
, arg0
),
7370 fold_convert (type
, tem
)));
7373 /* Convert x+x into x*2.0. */
7374 if (operand_equal_p (arg0
, arg1
, 0)
7375 && SCALAR_FLOAT_TYPE_P (type
))
7376 return fold (build2 (MULT_EXPR
, type
, arg0
,
7377 build_real (type
, dconst2
)));
7379 /* Convert x*c+x into x*(c+1). */
7380 if (flag_unsafe_math_optimizations
7381 && TREE_CODE (arg0
) == MULT_EXPR
7382 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7383 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0
, 1))
7384 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
7388 c
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
7389 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7390 return fold (build2 (MULT_EXPR
, type
, arg1
,
7391 build_real (type
, c
)));
7394 /* Convert x+x*c into x*(c+1). */
7395 if (flag_unsafe_math_optimizations
7396 && TREE_CODE (arg1
) == MULT_EXPR
7397 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
7398 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1
, 1))
7399 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
7403 c
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
7404 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
7405 return fold (build2 (MULT_EXPR
, type
, arg0
,
7406 build_real (type
, c
)));
7409 /* Convert x*c1+x*c2 into x*(c1+c2). */
7410 if (flag_unsafe_math_optimizations
7411 && TREE_CODE (arg0
) == MULT_EXPR
7412 && TREE_CODE (arg1
) == MULT_EXPR
7413 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7414 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0
, 1))
7415 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
7416 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1
, 1))
7417 && operand_equal_p (TREE_OPERAND (arg0
, 0),
7418 TREE_OPERAND (arg1
, 0), 0))
7420 REAL_VALUE_TYPE c1
, c2
;
7422 c1
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
7423 c2
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
7424 real_arithmetic (&c1
, PLUS_EXPR
, &c1
, &c2
);
7425 return fold (build2 (MULT_EXPR
, type
,
7426 TREE_OPERAND (arg0
, 0),
7427 build_real (type
, c1
)));
7429 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7430 if (flag_unsafe_math_optimizations
7431 && TREE_CODE (arg1
) == PLUS_EXPR
7432 && TREE_CODE (arg0
) != MULT_EXPR
)
7434 tree tree10
= TREE_OPERAND (arg1
, 0);
7435 tree tree11
= TREE_OPERAND (arg1
, 1);
7436 if (TREE_CODE (tree11
) == MULT_EXPR
7437 && TREE_CODE (tree10
) == MULT_EXPR
)
7440 tree0
= fold (build2 (PLUS_EXPR
, type
, arg0
, tree10
));
7441 return fold (build2 (PLUS_EXPR
, type
, tree0
, tree11
));
7444 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7445 if (flag_unsafe_math_optimizations
7446 && TREE_CODE (arg0
) == PLUS_EXPR
7447 && TREE_CODE (arg1
) != MULT_EXPR
)
7449 tree tree00
= TREE_OPERAND (arg0
, 0);
7450 tree tree01
= TREE_OPERAND (arg0
, 1);
7451 if (TREE_CODE (tree01
) == MULT_EXPR
7452 && TREE_CODE (tree00
) == MULT_EXPR
)
7455 tree0
= fold (build2 (PLUS_EXPR
, type
, tree01
, arg1
));
7456 return fold (build2 (PLUS_EXPR
, type
, tree00
, tree0
));
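/* These -funsafe-math-optimizations rewrites, in source form for
   double x (c1 and c2 stand for REAL_CST literals):

     x + x          becomes   x * 2.0       (exact, always valid)
     x*c1 + x       becomes   x * (c1 + 1)
     x + x*c1       becomes   x * (c1 + 1)
     x*c1 + x*c2    becomes   x * (c1 + c2)

   The last three can change rounding and so are gated on the flag.  */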
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
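/* A sketch of the rotate recognition, assuming a 32-bit unsigned
   int x and 0 < n < 32:

     (x << 3) | (x >> 29)          becomes   a rotate-left of x by 3
     (x << n) + (x >> (32 - n))    becomes   a rotate-left of x by n

   expressed as LROTATE_EXPR/RROTATE_EXPR.  The operand must be
   unsigned so that the right shift is a logical shift.  */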
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}

      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != type)
	    t1 = fold_convert (type, t1);

	  return t1;
	}
      return NULL_TREE;
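/* A sketch of the association above: split_tree separates each
   operand into variable, constant and literal parts, so for ints

     (x + 4) + (y + 9)   becomes   (x + y) + 13
     (x + 4) - 9         becomes   x - 5

   Floats take this path only under -funsafe-math-optimizations,
   since reassociation changes rounding.  */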
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
			     TREE_OPERAND (arg0, 0)));

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 0))),
				     arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 1))),
				     arg0));
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build2 (BIT_XOR_EXPR, type,
				      TREE_OPERAND (arg0, 0), mask1));
		  return fold (build2 (MINUS_EXPR, type, tem, mask1));
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this kind
	 of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR)
	{
	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
	  if (tem)
	    return fold_convert (type, fold (tem));
	}

      if (TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR
	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  /* (A * C) - (B * C) -> (A-B) * C.  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
			       TREE_OPERAND (arg1, 1), 0))
	    return fold (build2 (MULT_EXPR, type,
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 0),
					       TREE_OPERAND (arg1, 0))),
				 TREE_OPERAND (arg0, 1)));
	  /* (A * C1) - (A * C2) -> A * (C1-C2).  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0))
	    return fold (build2 (MULT_EXPR, type,
				 TREE_OPERAND (arg0, 0),
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 1),
					       TREE_OPERAND (arg1, 1)))));
	}
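/* The bitwise MINUS_EXPR rewrites above, in source form for
   unsigned a, b (m stands for a constant of the form 2**k - 1):

     a - (a & b)           becomes   ~b & a
     (a & ~m) - (a & m)    becomes   (a ^ m) - m
     (a * c) - (b * c)     becomes   (a - b) * c                 */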
      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (MULT_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (MULT_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_mult (type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg0,
				 TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg1,
				 TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0,
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
	}
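/* The integer MULT_EXPR rewrites above, in source form:

     a * (1 << b)   becomes   a << b

   and a * c with an INTEGER_CST c is handed to extract_muldiv,
   which tries to distribute the constant through nested additions,
   subtractions and multiplications.  */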
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build2 (RDIV_EXPR, type, tem,
				     TREE_OPERAND (arg0, 1)));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert (type, tem);
		  return fold (build2 (MULT_EXPR, type, tem, tem));
		}
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build2 (PLUS_EXPR, type,
				     TREE_VALUE (TREE_OPERAND (arg0, 1)),
				     TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_AND_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}
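      /* The rewrite above is De Morgan's law: ~a | ~b == ~(a & b).  The
	 folded form needs a single BIT_NOT_EXPR instead of two.  */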
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
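      /* For example, (x & 0x0f) ^ (y & 0xf0) has no bits in common between
	 the two masks, so it is equivalent to (x & 0x0f) | (y & 0xf0) and
	 can reuse all of the BIT_IOR_EXPR simplifications.  */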
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_IOR_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}

      goto associate;
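      /* The rewrite above is the dual De Morgan's law: ~a & ~b == ~(a | b).
	 Similarly, the 0377 case drops a mask that cannot change the value:
	 ((int) c & 0377) is just (int) c when c is an unsigned char, because
	 the widening conversion already zero-extends.  */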
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (RDIV_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (RDIV_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build2 (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build2 (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
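      /* For example, x/8.0 becomes x*0.125 whenever we are optimizing,
	 because the reciprocal of a power of two is exact; x/10.0 becomes
	 x*0.1 only under flag_unsafe_math_optimizations, since 0.1 is not
	 exactly representable in binary floating point.  */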
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			     fold (build2 (MULT_EXPR, type,
					   TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build2 (MULT_EXPR, type,
			     fold (build2 (RDIV_EXPR, type, arg0,
					   TREE_OPERAND (arg1, 0))),
			     TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build2 (RDIV_EXPR, type, tem,
				 TREE_OPERAND (arg1, 0)));
	}

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_div (type, arg0, arg1, code);
	  if (tem)
	    return tem;
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }
	}
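      /* For example, x/exp(y) becomes x*exp(-y) and x/pow(y,z) becomes
	 x*pow(y,-z), trading the division for a usually cheaper
	 multiplication.  */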
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build2 (RDIV_EXPR, type,
				       build_real (type, dconst1), tmp));
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      if (TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tem = fold_complex_div (type, arg0, arg1, code);
	  if (tem)
	    return tem;
	}
      return NULL_TREE;
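      /* Example of the EXACT_DIV_EXPR rewrite above: for (n * 8) / 8 with
	 FLOOR_DIV_EXPR, multiple_of_p shows no rounding can occur, so the
	 cheapest division variant can be used instead.  */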
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
	 BIT_AND_EXPR, i.e. "X % C" into "X & C2".  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type)
	  && integer_pow2p (arg1))
	{
	  unsigned HOST_WIDE_INT high, low;
	  tree mask;
	  int l;

	  l = tree_log2 (arg1);
	  if (l >= HOST_BITS_PER_WIDE_INT)
	    {
	      high = ((unsigned HOST_WIDE_INT) 1
		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
	      low = -1;
	    }
	  else
	    {
	      high = 0;
	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
	    }

	  mask = build_int_cst_wide (type, low, high);
	  return fold (build2 (BIT_AND_EXPR, type,
			       fold_convert (type, arg0), mask));
	}
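      /* For example, unsigned x % 8 folds to x & 7.  The high/low pair
	 above builds the mask C - 1 even when C is wider than a single
	 HOST_WIDE_INT word.  */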
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold (build2 (code, type, fold_convert (type, arg0),
			     fold_convert (type, negate_expr (arg1))));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
	return fold (build2 (code, type, fold_convert (type, arg0),
			     fold_convert (type, TREE_OPERAND (arg1, 0))));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (NULL_TREE,
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold (build2 (RROTATE_EXPR, type, arg0, tem));
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build2 (code, type,
					   TREE_OPERAND (arg0, 0), arg1)),
			     fold (build2 (code, type,
					   TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      return NULL_TREE;
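      /* For example, in a 32-bit type a left-rotate by 5 becomes a
	 right-rotate by 27, and two right-rotates by 10 and 22 cancel
	 because their counts sum to the mode width.  */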
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem)
	    return fold (build2 (code, type, tem, arg1));

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem)
	    return fold (build2 (code, type, arg0, tem));
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build2 (TREE_CODE (arg0), type, a00,
				 fold (build2 (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build2 (TREE_CODE (arg0), type, a00,
				 fold (build2 (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build2 (TREE_CODE (arg0), type, a01,
				 fold (build2 (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build2 (TREE_CODE (arg0), type,
				 fold (build2 (code, type, a00, a10)),
				 a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
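      /* Example of the distribution rule above: (a || b) && (a || c)
	 becomes a || (b && c), so `a' is tested only once; the
	 side-effect checks are what make the reassociation legal.  */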
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));

      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
				      ? code == EQ_EXPR : code != EQ_EXPR,
				      type);

      /* If this is a comparison of two exprs that look like an
	 ARRAY_REF of the same object, then we can fold this to a
	 comparison of the two offsets.  */
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	{
	  tree base0, offset0, base1, offset1;

	  if (extract_array_ref (arg0, &base0, &offset0)
	      && extract_array_ref (arg1, &base1, &offset1)
	      && operand_equal_p (base0, base1, 0))
	    {
	      if (offset0 == NULL_TREE
		  && offset1 == NULL_TREE)
		{
		  offset0 = integer_zero_node;
		  offset1 = integer_zero_node;
		}
	      else if (offset0 == NULL_TREE)
		offset0 = build_int_cst (TREE_TYPE (offset1), 0);
	      else if (offset1 == NULL_TREE)
		offset1 = build_int_cst (TREE_TYPE (offset0), 0);

	      if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
		return fold (build2 (code, type, offset0, offset1));
	    }
	}
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold (build2 (code, type, fold_convert (newtype, targ0),
				 fold_convert (newtype, targ1)));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
				 TREE_OPERAND (arg0, 0)));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold (build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst))));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold (build2 (code, type, arg0,
				     build_real (TREE_TYPE (arg1), dconst0)));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, tem, arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build2 (swap_tree_comparison (code), type,
				 TREE_OPERAND (arg0, 1), tem));

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }

	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
	      && host_integerp (DECL_SIZE (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)), 1))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
	      tree folded_compare, shift;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold (build2 (code, type,
					     TREE_OPERAND (varop, 0), arg1));
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      shift = build_int_cst (NULL_TREE,
				     TYPE_PRECISION (TREE_TYPE (varop)) - size);
	      shift = fold_convert (TREE_TYPE (varop), shift);
	      newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				       newconst, shift));
	      newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				       newconst, shift));
	    }

	  return fold (build2 (code, type, varop, newconst));
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build2 (GT_EXPR, type, arg0, arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build2 (LE_EXPR, type, arg0, arg1));

	    default:
	      break;
	    }
	}
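      /* For example, x >= 1 becomes x > 0 and x < 1 becomes x <= 0,
	 canonicalizing toward the comparisons against zero and the
	 boundary checks handled below.  */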
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.

	 This is quite similar to fold_relational_hi_lo, however,
	 attempts to share the code have been nothing but trouble.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold (build2 (NE_EXPR, type, arg0, arg1));

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (NE_EXPR, type, arg0, arg1));
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold (build2 (NE_EXPR, type, arg0, arg1));

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (NE_EXPR, type, arg0, arg1));
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));
		default:
		  break;
		}

	    else if (!in_gimple_form
		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
		    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
		    return fold
		      (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			       type, fold_convert (st0, arg0),
			       fold_convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (code, type,
			     TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
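      /* Examples of the rules above: x + 3 == 7 becomes x == 4, -x == 5
	 becomes x == -5, and x - y == 0 becomes x == y.  The last rewrite
	 is limited to EQ/NE because x - y may wrap where the direct
	 ordered comparison would not.  */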
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR)
	{
	  /* If we are widening one operand of an integer comparison,
	     see if the other operand is similarly being widened.  Perhaps we
	     can do the comparison in the narrower type.  */
	  tem = fold_widened_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;

	  /* Or if we are changing signedness.  */
	  tem = fold_sign_changed_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tem = optimize_minmax_comparison (code, type, op0, op1);
	  if (tem)
	    return tem;
	}

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build2 (TRUTH_ANDIF_EXPR, type,
			     build2 (GE_EXPR, type,
				     TREE_OPERAND (arg0, 0), tem),
			     build2 (LE_EXPR, type,
				     TREE_OPERAND (arg0, 0), arg1)));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      else if (code == GE_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1)
		   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		       && real_zerop (arg1))))
	return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      else if (code == LT_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == ABS_EXPR
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold (build2 (code, type,
			    build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				    build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					    arg01, TREE_OPERAND (arg00, 1)),
				    fold_convert (TREE_TYPE (arg0),
						  integer_one_node)),
			    arg1));
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build2 (code, type,
			    build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				    build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					    arg00, TREE_OPERAND (arg01, 1)),
				    fold_convert (TREE_TYPE (arg0),
						  integer_one_node)),
			    arg1));
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
	  tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
				      fold_convert (newtype,
						    TREE_OPERAND (arg0, 0)),
				      fold_convert (newtype,
						    TREE_OPERAND (arg0, 1))));

	  return fold (build2 (code, type, newmod,
			       fold_convert (newtype, arg1)));
	}

      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			     arg0, fold_convert (TREE_TYPE (arg0),
						 integer_zero_node)));
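      /* For example, (a & 8) == 8 becomes (a & 8) != 0, and a signed
	 a % 4 == 0 is first rewritten with an unsigned modulus, which
	 expands more simply and is equivalent when compared against zero.  */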
      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
	 2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold (build1 (BIT_NOT_EXPR,
				    TREE_TYPE (TREE_OPERAND (arg0, 1)),
				    TREE_OPERAND (arg0, 1)));
	  tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
					arg1, notc));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
	  tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
					TREE_OPERAND (arg0, 1), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TYPE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  fold_convert (TREE_TYPE (arg0), integer_zero_node));

      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold (build2 (EQ_EXPR, type, arg0, arg1));

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      break;
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold (build2 (code, type,
				eval_subst (arg0, cval1, maxval,
					    cval2, minval),
				arg1));
	      tree equal_result
		= fold (build2 (code, type,
				eval_subst (arg0, cval1, maxval,
					    cval2, maxval),
				arg1));
	      tree low_result
		= fold (build2 (code, type,
				eval_subst (arg0, cval1, minval,
					    cval2, maxval),
				arg1));

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  tem = build2 (code, type, cval1, cval2);
		  if (save_p)
		    return save_expr (tem);
		  else
		    return fold (tem);
		}
	    }
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* If this is a comparison of complex values and either or both sides
	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
	 This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
	  && (TREE_CODE (arg0) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg0) == COMPLEX_CST
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree real0, imag0, real1, imag1;

	  arg0 = save_expr (arg0);
	  arg1 = save_expr (arg1);
	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

	  return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
				: TRUTH_ORIF_EXPR),
			       type,
			       fold (build2 (code, type, real0, real1)),
			       fold (build2 (code, type, imag0, imag1))));
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    return fold (build2 (code, type,
				 build1 (INDIRECT_REF, char_type_node,
					 TREE_VALUE (arglist)),
				 fold_convert (char_type_node,
					       integer_zero_node)));
	}
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  t1 = fold_div_compare (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      if ((code == EQ_EXPR || code == NE_EXPR)
	  && !TREE_SIDE_EFFECTS (arg0)
	  && integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	return constant_boolean_node (code == NE_EXPR, type);

      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? NULL_TREE : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold (build2 (code, type, fold_convert (newtype, targ0),
			       fold_convert (newtype, targ1)));
      }

      return NULL_TREE;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a ternary expression EXPR.  Return the folded expression if
   folding is successful.  Otherwise, return the original
   expression.  */

static tree
fold_ternary (tree expr)
{
  const tree t = expr;
  const tree type = TREE_TYPE (expr);
  tree tem;
  tree op0, op1, op2;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  op0 = TREE_OPERAND (t, 0);
  op1 = TREE_OPERAND (t, 1);
  op2 = TREE_OPERAND (t, 2);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return t;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.  */
	  if (! VOID_TYPE_P (TREE_TYPE (tem))
	      || VOID_TYPE_P (type))
	    return pedantic_non_lvalue (tem);
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold (build3 (code, type, tem, op2, op1));
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	return fold_convert (type, fold (build2 (BIT_AND_EXPR,
						 TREE_TYPE (tem), tem, arg1)));

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold (build2 (BIT_AND_EXPR, type,
				 TREE_OPERAND (tem, 0), arg1));
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
9830 if (integer_onep (op2
)
9831 && truth_value_p (TREE_CODE (arg0
))
9832 && truth_value_p (TREE_CODE (arg1
)))
9834 /* Only perform transformation if ARG0 is easily inverted. */
9835 tem
= invert_truthvalue (arg0
);
9836 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
9837 return fold (build2 (TRUTH_ORIF_EXPR
, type
, tem
, arg1
));
9840 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9841 if (integer_zerop (arg1
)
9842 && truth_value_p (TREE_CODE (arg0
))
9843 && truth_value_p (TREE_CODE (op2
)))
9845 /* Only perform transformation if ARG0 is easily inverted. */
9846 tem
= invert_truthvalue (arg0
);
9847 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
9848 return fold (build2 (TRUTH_ANDIF_EXPR
, type
, tem
, op2
));
9851 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9852 if (integer_onep (arg1
)
9853 && truth_value_p (TREE_CODE (arg0
))
9854 && truth_value_p (TREE_CODE (op2
)))
9855 return fold (build2 (TRUTH_ORIF_EXPR
, type
, arg0
, op2
));
9860 /* Check for a built-in function. */
9861 if (TREE_CODE (op0
) == ADDR_EXPR
9862 && TREE_CODE (TREE_OPERAND (op0
, 0)) == FUNCTION_DECL
9863 && DECL_BUILT_IN (TREE_OPERAND (op0
, 0)))
9865 tree tmp
= fold_builtin (t
, false);
9873 } /* switch (code) */
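/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   COND_EXPR truth-value rewrites above, restated on plain ints.  For truth
   values a and b (each 0 or 1) the transformations preserve the selected
   value exactly; the standalone program below checks them exhaustively.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	assert ((a ? 1 : 0) == a);		/* A ? 1 : 0  =>  A       */
	assert ((a ? 0 : 1) == !a);		/* A ? 0 : 1  =>  !A      */
	assert ((a ? b : 0) == (a && b));	/* A ? B : 0  =>  A && B  */
	assert ((a ? b : 1) == (!a || b));	/* A ? B : 1  =>  !A || B */
	assert ((a ? 0 : b) == (!a && b));	/* A ? 0 : B  =>  !A && B */
	assert ((a ? 1 : b) == (a || b));	/* A ? 1 : B  =>  A || B  */
      }
  return 0;
}
#endif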
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  tree op0, op1;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  tem = fold_ternary (expr);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
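/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   checksum-before/after pattern above, reduced to plain C.  A trivial
   FNV-1a hash stands in for libiberty's md5 so the sketch is
   self-contained; the idea is the same -- hash the supposedly read-only
   input before and after the call, and complain if it was mutated.
   All names here (hash_bytes, buggy_fold) are hypothetical.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static unsigned long
hash_bytes (const void *p, size_t n)
{
  const unsigned char *s = p;
  unsigned long h = 2166136261ul;
  while (n--)
    h = (h ^ *s++) * 16777619ul;
  return h;
}

/* A buggy "fold" that modifies its input in place.  */
static int
buggy_fold (int *expr)
{
  expr[0] = -expr[0];		/* oops: mutates the original */
  return expr[0] + expr[1];
}

int
main (void)
{
  int expr[2] = { 1, 2 };
  unsigned long before = hash_bytes (expr, sizeof expr);
  int ret = buggy_fold (expr);
  unsigned long after = hash_bytes (expr, sizeof expr);

  if (before != after)
    {
      fprintf (stderr, "fold check: original tree changed by fold\n");
      abort ();			/* analogous to internal_error above */
    }
  return ret;
}
#endif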
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      TYPE_CACHED_VALUES_P (expr) = 0;
      TYPE_CACHED_VALUES (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_rounding_math = flag_rounding_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_rounding_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_rounding_math = saved_rounding_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
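/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   shape of the recursion in multiple_of_p on a toy expression type.  A
   product is a multiple of BOTTOM if either factor is; a sum only if
   both addends are.  All toy_* types and names here are hypothetical,
   for exposition only.  */
#if 0
enum toy_code { TOY_CST, TOY_MULT, TOY_PLUS };

struct toy_expr
{
  enum toy_code code;
  long cst;				/* valid for TOY_CST */
  const struct toy_expr *op0, *op1;	/* valid otherwise   */
};

static int
toy_multiple_of_p (const struct toy_expr *top, long bottom)
{
  switch (top->code)
    {
    case TOY_CST:
      return bottom != 0 && top->cst % bottom == 0;
    case TOY_MULT:
      /* One factor being a multiple suffices.  */
      return toy_multiple_of_p (top->op0, bottom)
	     || toy_multiple_of_p (top->op1, bottom);
    case TOY_PLUS:
      /* Both addends must be multiples.  */
      return toy_multiple_of_p (top->op0, bottom)
	     && toy_multiple_of_p (top->op1, bottom);
    }
  return 0;
}

int
main (void)
{
  static const struct toy_expr j8 = { TOY_CST, 24, 0, 0 };   /* J * 8 == 24 */
  static const struct toy_expr i = { TOY_CST, 5, 0, 0 };
  static const struct toy_expr prod = { TOY_MULT, 0, &i, &j8 };
  return toy_multiple_of_p (&prod, 8) ? 0 : 1;	/* multiple of 8: exit 0 */
}
#endif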
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
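/* Illustrative sketch (not part of GCC; guarded out of compilation): why
   zero_extend(x) + zero_extend(y) is non-negative when x and y are
   unsigned and at least 2 bits narrower than the result.  Two p-bit
   unsigned values sum to at most 2^(p+1) - 2, which needs only p+1 bits,
   so a wider signed result type can never have its sign bit set.
   Checked exhaustively for p = 8 widened to 16 bits.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned x, y;
  for (x = 0; x <= 0xff; x++)
    for (y = 0; y <= 0xff; y++)
      {
	int16_t sum = (int16_t) ((uint8_t) x + (uint8_t) y);
	assert (sum >= 0);	/* at most 255 + 255 = 510 < 32768 */
      }
  return 0;
}
#endif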
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }

  return false;
}
/* See if we are applying CODE, a relational operator, to the highest or
   lowest possible integer of TYPE.  If so, then the result is a compile
   time constant.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
		       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
	  || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	{
	  max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
	  min = 0;
	}
      else
	{
	  max = signed_max;
	  min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	}

      if (TREE_INT_CST_HIGH (op1) == 0
	  && TREE_INT_CST_LOW (op1) == max)
	switch (code)
	  {
	  case GT_EXPR:
	    return omit_one_operand (type, integer_zero_node, op0);

	  case GE_EXPR:
	    *code_p = EQ_EXPR;
	    break;
	  case LE_EXPR:
	    return omit_one_operand (type, integer_one_node, op0);

	  case LT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  /* The GE_EXPR and LT_EXPR cases above are not normally
	     reached because of previous transformations.  */

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == max - 1)
	switch (code)
	  {
	  case GT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;
	  case LE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;
	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min)
	switch (code)
	  {
	  case LT_EXPR:
	    return omit_one_operand (type, integer_zero_node, op0);

	  case LE_EXPR:
	    *code_p = EQ_EXPR;
	    break;

	  case GE_EXPR:
	    return omit_one_operand (type, integer_one_node, op0);

	  case GT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min + 1)
	switch (code)
	  {
	  case GE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;
	  case LT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;
	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == signed_max
	       && TYPE_UNSIGNED (TREE_TYPE (op1))
	       /* signed_type does not work on pointer types.  */
	       && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	{
	  /* The following case also applies to X < signed_max+1
	     and X >= signed_max+1 because of previous transformations.  */
	  if (code == LE_EXPR || code == GT_EXPR)
	    {
	      tree st0, st1, exp, retval;
	      st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
	      st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

	      exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			    type,
			    fold_convert (st0, op0),
			    fold_convert (st1, integer_zero_node));

	      retval = fold_binary_to_constant (TREE_CODE (exp),
						TREE_TYPE (exp),
						TREE_OPERAND (exp, 0),
						TREE_OPERAND (exp, 1));

	      /* If we are in gimple form, then returning EXP would create
		 non-gimple expressions.  Clearing it is safe and ensures
		 we do not allow a non-gimple expression to escape.  */
	      if (in_gimple_form)
		exp = NULL;

	      return (retval ? retval : exp);
	    }
	}
    }

  return NULL_TREE;
}
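/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   extreme-value rewrites made by fold_relational_hi_lo, checked on signed
   chars.  Within the type's range, a comparison against the highest or
   lowest value either collapses to a constant or becomes an equality
   test.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int x;
  for (x = SCHAR_MIN; x <= SCHAR_MAX; x++)
    {
      assert ((x > SCHAR_MAX) == 0);			/* X > MAX is false     */
      assert ((x >= SCHAR_MAX) == (x == SCHAR_MAX));	/* X >= MAX => X == MAX */
      assert ((x <= SCHAR_MAX) == 1);			/* X <= MAX is true     */
      assert ((x < SCHAR_MIN) == 0);			/* X < MIN is false     */
      assert ((x <= SCHAR_MIN) == (x == SCHAR_MIN));	/* X <= MIN => X == MIN */
      assert ((x >= SCHAR_MIN) == 1);			/* X >= MIN is true     */
    }
  return 0;
}
#endif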
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when at least one operand is a constant.
   As a result of those simplifying assumptions this routine is far
   simpler than the generic fold routine.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  tree subop0;
  tree subop1;

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  /* If either operand is a complex type, extract its real component.  */
  if (TREE_CODE (op0) == COMPLEX_CST)
    subop0 = TREE_REALPART (op0);
  else
    subop0 = op0;

  if (TREE_CODE (op1) == COMPLEX_CST)
    subop1 = TREE_REALPART (op1);
  else
    subop1 = op1;

  /* Note if either argument is not a real or integer constant.
     With a few exceptions, simplification is limited to cases
     where both arguments are constants.  */
  if ((TREE_CODE (subop0) != INTEGER_CST
       && TREE_CODE (subop0) != REAL_CST)
      || (TREE_CODE (subop1) != INTEGER_CST
	  && TREE_CODE (subop1) != REAL_CST))
    return NULL_TREE;

  switch (code)
    {
    case PLUS_EXPR:
      /* (plus (address) (const_int)) is a constant.  */
      if (TREE_CODE (op0) == PLUS_EXPR
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
	      || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
		  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
		      == ADDR_EXPR)))
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
			 const_binop (PLUS_EXPR, op1,
				      TREE_OPERAND (op0, 1), 0));
	}
    case BIT_XOR_EXPR:

    binary:
      /* Both arguments are constants.  Simplify.  */
      tem = const_binop (code, op0, op1, 0);
      if (tem != NULL_TREE)
	{
	  /* The return value should always have the same type as
	     the original expression.  */
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert (type, tem);

	  return tem;
	}
      return NULL_TREE;

    case MINUS_EXPR:
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an
	 operand is volatile.  */
      if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
	return fold_convert (type, integer_zero_node);

      goto binary;

    case MULT_EXPR:
    case BIT_AND_EXPR:
      /* Special case multiplication or bitwise AND where one argument
	 is zero.  */
      if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
	return omit_one_operand (type, op1, op0);
      else
	if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
	    && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
	    && real_zerop (op1))
	  return omit_one_operand (type, op1, op0);

      goto binary;

    case BIT_IOR_EXPR:
      /* Special case when we know the result will be all ones.  */
      if (integer_all_onesp (op1))
	return omit_one_operand (type, op1, op0);

      goto binary;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by zero is undefined.  */
      if (integer_zerop (op1))
	return NULL_TREE;

      if (TREE_CODE (op1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
	  && real_zerop (op1))
	return NULL_TREE;

      goto binary;

    case MIN_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, op1, op0);

      goto binary;

    case MAX_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, op1, op0);

      goto binary;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
	return omit_one_operand (type, op0, op1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
      if (integer_zerop (op0))
	return omit_one_operand (type, op0, op1);

      /* Since negative shift count is not well-defined, don't
	 try to compute it in the compiler.  */
      if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
	return NULL_TREE;

      goto binary;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* -1 rotated either direction by any amount is still -1.  */
      if (integer_all_onesp (op0))
	return omit_one_operand (type, op0, op1);

      /* 0 rotated either direction by any amount is still zero.  */
      if (integer_zerop (op0))
	return omit_one_operand (type, op0, op1);

      goto binary;

    case COMPLEX_EXPR:
      return build_complex (type, op0, op1);

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (op0) == INTEGER_CST
	   && TREE_CODE (op1) != INTEGER_CST)
	  || (TREE_CODE (op0) == REAL_CST
	      && TREE_CODE (op1) != REAL_CST))
	{
	  tem = op0;
	  op0 = op1;
	  op1 = tem;
	  code = swap_tree_comparison (code);
	}

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (op0) != INTEGER_CST
	  && tree_int_cst_sgn (op1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      code = GT_EXPR;
	      op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	      break;

	    case LT_EXPR:
	      code = LE_EXPR;
	      op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	      break;

	    default:
	      break;
	    }
	}

      tem = fold_relational_hi_lo (&code, type, &op0, &op1);
      if (tem)
	return tem;

      /* Fall through.  */

    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      return fold_relational_const (code, type, op0, op1);

    case RANGE_EXPR:
      /* This could probably be handled.  */
      return NULL_TREE;

    case TRUTH_AND_EXPR:
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (op1))
	return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (op0))
	return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
	return constant_boolean_node (true, type);
      return NULL_TREE;

    case TRUTH_OR_EXPR:
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
	return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
	return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
	return constant_boolean_node (false, type);
      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
	{
	  int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
	  return constant_boolean_node (x, type);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree subop;

  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      if (TREE_CODE (op0) == COMPLEX_CST)
	subop = TREE_REALPART (op0);
      else
	subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
	return NULL_TREE;
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_ROUND_EXPR:
      return fold_convert_const (code, type, op0);

    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	return fold_negate_const (op0, type);
      else
	return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	return fold_abs_const (op0, type);
      else
	return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST)
	return fold_not_const (op0, type);
      else
	return NULL_TREE;

    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
	return TREE_REALPART (op0);
      else
	return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
	return TREE_IMAGPART (op0);
      else
	return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST
	  && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
	return build_complex (type, TREE_REALPART (op0),
			      negate_expr (TREE_IMAGPART (op0)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL_TREE;
}
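/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   identity fold_read_from_constant_string exploits.  Indexing a string
   literal with a constant index denotes a character that is known at
   compile time; the run-time checks below merely restate it.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert ("hello"[1] == 'e');		/* array indexing form   */
  assert (*("hello" + 4) == 'o');	/* pointer-arithmetic form */
  return 0;
}
#endif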
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
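/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   double-word negation that neg_double performs, on a value represented
   as 64-bit (low, high) halves.  Negation is complement-plus-one carried
   across the words: if the low word is zero the +1 carries all the way
   into the high word; otherwise it is absorbed by the low word, leaving
   a plain complement up high.  The toy_* name is hypothetical.  */
#if 0
#include <assert.h>
#include <stdint.h>

/* Returns nonzero on overflow (negating the most negative value).  */
static int
toy_neg_double (uint64_t l1, int64_t h1, uint64_t *lv, int64_t *hv)
{
  if (l1 == 0)
    {
      /* No borrow out of the low word: negate the high word directly.
	 Overflow iff both H1 and its negation have the sign bit set.  */
      *lv = 0;
      *hv = -h1;
      return (*hv & h1) < 0;
    }
  else
    {
      /* -x == ~x + 1; the +1 is absorbed by the nonzero low word.  */
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

int
main (void)
{
  uint64_t lv;
  int64_t hv;
  /* negate 5: low becomes 2^64 - 5, high becomes all ones.  */
  assert (!toy_neg_double (5, 0, &lv, &hv) && lv == (uint64_t) -5 && hv == -1);
  /* negate 2^64: low stays 0, high becomes -1.  */
  assert (!toy_neg_double (0, 1, &lv, &hv) && lv == 0 && hv == -1);
  return 0;
}
#endif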
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}
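/* Illustrative sketch (not part of GCC; guarded out of compilation): the
   LT/EQ canonicalization described above, checked on plain ints.  GT is
   LT with swapped operands, GE is inverted LT, LE is inverted LT with
   swapped operands, and NE is inverted EQ, so only LT and EQ ever need
   to be computed.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      {
	assert ((a > b) == (b < a));	/* GT: swap, do LT          */
	assert ((a >= b) == !(a < b));	/* GE: do LT, invert        */
	assert ((a <= b) == !(b < a));	/* LE: swap, do LT, invert  */
	assert ((a != b) == !(a == b));	/* NE: do EQ, invert        */
      }
  return 0;
}
#endif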
/* Build an expression for a cleanup point containing EXPR, with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has no side effects.  If either has none, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left-hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value T, return a simplified version of an indirection
   through T, or NULL_TREE if no simplification is possible.  */

static tree
fold_indirect_ref_1 (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
	return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);

      if (TREE_CODE (core) == INDIRECT_REF)
	core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
	return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
			     arg0 ? arg0 : TREE_OPERAND (exp, 0),
			     arg1 ? arg1 : TREE_OPERAND (exp, 1)));