/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
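
/* The encoding assigns one bit to each primitive outcome -- LT, EQ,
   GT, with a fourth bit for UNORD -- so composite codes are unions of
   bits (e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ).  A short
   worked sketch (illustrative only): ANDing two codes intersects the
   outcomes they accept and ORing unions them, which is what lets
   combine_comparisons below merge two comparisons with a single
   bitwise operation:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ  */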
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
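
/* A minimal sketch (illustrative only) of how OVERFLOW_SUM_SIGN is
   meant to be used: compute the wrapped sum first, then test it.
   Both addends below are positive, yet their two's complement sum has
   the sign bit set, so the macro reports overflow.  */
#if 0
static int
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;
  HOST_WIDE_INT sum = a + b;            /* wraps onto the sign bit */
  return OVERFLOW_SUM_SIGN (a, b, sum); /* nonzero: overflow occurred */
}
#endif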
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
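
/* A round-trip sketch (illustrative only): encode splits each of the
   two HOST_WIDE_INT pieces into two half-words, decode reassembles
   them, so low2/hi2 end up equal to low/hi.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x1234, low2;
  HOST_WIDE_INT hi = 0x5678, hi2;

  encode (words, low, hi);      /* words[0..3] hold the four half-words */
  decode (words, &low2, &hi2);  /* low2 == 0x1234, hi2 == 0x5678 */
}
#endif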
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
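
/* A behavioral sketch of force_fit_type (illustrative only; it assumes
   unsigned_char_type_node has 8-bit precision, as on common targets).
   Bits beyond the type's precision are cleared, and a new node is
   returned because the value changed.  */
#if 0
static void
force_fit_type_example (void)
{
  tree t = build_int_cst_wide (unsigned_char_type_node, 0x1ff, 0);
  t = force_fit_type (t, 0, false, false);
  /* Now TREE_INT_CST_LOW (t) == 0xff and TREE_INT_CST_HIGH (t) == 0.  */
}
#endif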
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
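
/* A usage sketch (illustrative only): a carry out of the low words
   propagates into the high words via the (l < l1) test above.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double_with_sign (~(unsigned HOST_WIDE_INT) 0, 0, /* 2^N - 1 */
                                  1, 0,                           /* + 1 */
                                  &lv, &hv, false);
  /* The low sum wraps to zero and the carry bumps the high word:
     lv == 0, hv == 1, and ovf == 0 since no signed overflow occurred.  */
}
#endif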
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
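
/* A rounding sketch (illustrative only): dividing -7 by 2 signed.
   TRUNC_DIV_EXPR gives -3 rem -1, FLOOR_DIV_EXPR gives -4 rem 1,
   CEIL_DIV_EXPR gives -3 rem -1, and ROUND_DIV_EXPR gives -4, since
   twice the remainder's magnitude equals the divisor and ties round
   away from zero.  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        2, 0, &lquo, &hquo, &lrem, &hrem);
  /* The quotient pieces encode -4, the remainder pieces encode 1.  */
}
#endif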
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
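
/* A decomposition sketch (illustrative only): splitting the tree
   (A + B) - 5 with CODE == PLUS_EXPR and NEGATE_P == 0 stores the
   subtracted literal 5 in *MINUS_LITP, leaves *LITP and *CONP null,
   and returns the variable part A + B; splitting A + 5 instead
   stores 5 in *LITP and returns A.  */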
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
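
/* A folding sketch (illustrative only): combining two INTEGER_CSTs
   under MULT_EXPR yields a fresh constant with the overflow flags of
   the operands propagated.  */
#if 0
static void
int_const_binop_example (void)
{
  tree cst = int_const_binop (MULT_EXPR,
                              build_int_cst (integer_type_node, 6),
                              build_int_cst (integer_type_node, 4), 0);
  /* TREE_INT_CST_LOW (cst) == 24 and TREE_OVERFLOW (cst) == 0.  */
}
#endif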
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
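
/* A complex-folding sketch (illustrative only): the MULT_EXPR case
   above computes (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i, so the constant
   product (1 + 2i) * (3 + 4i) folds to -5 + 10i.  */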
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
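
/* A usage sketch (illustrative only): adding two constant sizetype
   values takes the int_const_binop shortcut above.  */
#if 0
static void
size_binop_example (void)
{
  tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (8));
  /* sz is a sizetype INTEGER_CST holding 12.  */
}
#endif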
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;

  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
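/* Illustrative sketch (assumed encoding, not GCC source): each comparison
   is a mask over the outcomes EQ, LT, GT and UNORDERED, so combining two
   comparisons of the same operands is just bitwise AND/OR of masks.
   Assuming LT = 1, EQ = 2, GT = 4, UNORD = 8: (a < b) || (a == b) gives
   mask 1|2 == 3, i.e. the LE predicate.  */
#if 0
enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4, CC_UNORD = 8 };
static int
combine_or (int lmask, int rmask)
{
  return lmask | rmask;   /* e.g. CC_LT | CC_EQ yields the LE mask.  */
}
#endif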
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_CEIL_EXPR:
	case FIX_TRUNC_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_ROUND_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! OP_SAME (0))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }
#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
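/* Illustrative sketch (assumed example, not GCC source): the sense in
   which -0.0 and 0.0 are distinguishable even though -0.0 == 0.0.  C's
   == cannot tell them apart, but their sign bits differ, so substituting
   one constant for the other would change e.g. 1.0 / x.  */
#if 0
#include <math.h>
static int
zero_signs_differ (double a, double b)
{
  /* Nonzero for the pair (-0.0, 0.0) although a == b holds.  */
  return a == b && signbit (a) != signbit (b);
}
#endif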
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code,
				     HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2 (code, type,
		     TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
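/* Illustrative sketch (assumed example, not GCC source): the
   distribution above in plain C.  Both sides are identities of Boolean
   algebra, so the rewrite saves one bitwise operation.  */
#if 0
static unsigned int
distributed_and (unsigned int a, unsigned int b, unsigned int c)
{
  /* (a | b) & (a | c) computed with one AND and one OR.  */
  return a | (b & c);
}
#endif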
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
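/* Illustrative sketch (assumed example, not GCC source): why this
   rewrite is flagged unsafe.  With a = b = 1e308 and c = 2.0, the
   original a / c + b / c yields 1e308, while the distributed form
   (a + b) / c overflows to infinity in the intermediate addition.  */
#if 0
static double
distributed_div (double a, double b, double c)
{
  return (a + b) / c;   /* may overflow where a / c + b / c does not */
}
#endif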
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
		    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
		   size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (linner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask),
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (rinner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
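/* Illustrative sketch (assumed layout, not GCC source): the constant
   case in plain C.  For a 3-bit field stored at bit offset 2 of a byte
   (little-endian bit allocation assumed), "field == 5" becomes one AND
   and one compare against the constant shifted into position, with no
   extraction shift.  */
#if 0
static int
field_eq_5 (unsigned char byte)
{
  /* 0x1C masks bits 2..4; 5 << 2 is the constant moved into place.  */
  return (byte & 0x1C) == (5 << 2);
}
#endif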
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type and has no other bits set.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}
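/* Illustrative sketch (assumed example, not GCC source): the same
   shift-up-then-down trick in plain C.  Shifting all-ones left and back
   right by (precision - size) leaves exactly SIZE low-order ones, e.g.
   low_ones (3) == 0x7 for a 32-bit type.  */
#if 0
static unsigned int
low_ones (int size)            /* assumes 0 < size <= 32 */
{
  unsigned int tmask = ~0u;
  return (tmask << (32 - size)) >> (32 - size);
}
#endif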
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
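/* Illustrative sketch (assumed example, not GCC source): the sign bit
   of a WIDTH-bit type is simply 1 << (WIDTH - 1); the double-word
   dance above exists only because the constant may not fit in one
   HOST_WIDE_INT.  For a 32-bit int this yields 0x80000000.  */
#if 0
static unsigned long long
sign_bit_of (int width)        /* assumes 0 < width <= 64 */
{
  return 1ull << (width - 1);
}
#endif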
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
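/* Illustrative sketch (assumed example, not GCC source): the range test
   above written out in plain C.  The unsigned subtraction wraps values
   below 2 around to huge numbers, so a single compare covers both the
   lower and the upper bound.  */
#if 0
static int
in_2_to_5 (int x)
{
  return (unsigned) (x - 2) <= 3u;
}
#endif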
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_CODE_LENGTH (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type = lang_hooks.types.type_for_mode
		(TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     fold_convert (arg0_type,
							   integer_one_node));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = lang_hooks.types.unsigned_type (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec;
      HOST_WIDE_INT hi;
      unsigned HOST_WIDE_INT lo;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = lang_hooks.types.signed_type (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_UNSIGNED (etype) && !flag_wrapv)
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
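/* Illustrative sketch (assumed example, not GCC source): the
   (c>=1) && (c<=127) special case in plain C.  127 is the maximum of
   signed char, so reinterpreting the byte as signed collapses the two
   compares into a single one against zero.  */
#if 0
static int
in_1_to_127 (unsigned char c)
{
  return (signed char) c > 0;
}
#endif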
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
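/* Illustrative worked example (editorial addition, not from the original
   sources): merging the two included ranges + [2, 10] and + [5, 20],
   i.e. in0_p == in1_p == 1, takes the first case above.  The ranges
   overlap and the second is not a subset of the first, so the result
   runs from the start of the second to the end of the first:
   *pin_p == 1, *plow == 5, *phigh == 10 -- the intersection [5, 10].  */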
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
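/* Editorial sketch, not part of GCC: a standalone check of the kind of
   range test this function builds.  "ch >= '0' && ch <= '9'" merges into
   a single unsigned comparison against the range width, which is what
   build_range_check emits via the (x - low) <= (high - low) rewrite.  */
#if 0
#include <assert.h>

int
main (void)
{
  int ch;
  for (ch = -256; ch <= 256; ch++)
    assert ((ch >= '0' && ch <= '9')
	    == ((unsigned int) (ch - '0') <= 9u));
  return 0;
}
#endif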
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
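/* Editorial sketch, not part of GCC: the same sign-extension trick on
   plain 8-bit quantities, for a P == 4 bit field.  The narrowing
   conversion and the right shift of a negative value assume the common
   two's-complement behavior, just as the tree-level code above relies
   on a signed type to get an arithmetic shift.  */
#if 0
#include <assert.h>

static unsigned char
unextend_demo (unsigned char c, int p)
{
  unsigned char bit = (c >> (p - 1)) & 1;	/* sign bit of the P-bit field */
  signed char temp = (signed char) (bit << 7);	/* into the high-order bit */
  temp >>= 8 - p - 1;				/* arithmetic shift sign-extends */
  return c ^ (unsigned char) temp;
}

int
main (void)
{
  /* 0x0c holds the 4-bit value -4 with the extra bits clear (not
     sign-extended): the XOR leaves the extra bits nonzero.  */
  assert (unextend_demo (0x0c, 4) == 0xfc);
  /* 0xfc is the same field value properly sign-extended: the extra
     bits of the result come out zero.  */
  assert (unextend_demo (0xfc, 4) == 0x0c);
  return 0;
}
#endif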
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2 (code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, ll_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1 (BIT_NOT_EXPR,
						     lntype, rl_mask),
					0)))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
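/* Illustrative worked example (editorial addition): given
   struct { unsigned char a, b; } *p, the test "p->a == 2 && p->b == 4"
   can become a single 16-bit load of both fields, masked (here the mask
   is all ones) and compared against the combined constant -- on a
   little-endian target, roughly (*(unsigned short *) p) == 0x0402.  */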
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
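/* Editorial sketch, not part of GCC: a brute-force check of the example
   from the comment above extract_muldiv -- dividing (X * 8) + (Y * 16)
   by 4 yields (X * 2) + (Y * 4) exactly, because both addends are
   divisible by 4 (the multiple_of_p condition in the PLUS_EXPR case).  */
#if 0
#include <assert.h>

int
main (void)
{
  long x, y;
  for (x = -50; x <= 50; x++)
    for (y = -50; y <= 50; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}
#endif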
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
	 folders may present us with (int *)&x.a + 4B.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
	{
	  *base = inner_base;
	  if (dummy1 == NULL_TREE)
	    *offset = TREE_OPERAND (expr, 1);
	  else
	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
				   dummy1, TREE_OPERAND (expr, 1));
	  return true;
	}
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
	{
	  tree idx = TREE_OPERAND (op0, 1);
	  *base = TREE_OPERAND (op0, 0);
	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
				 array_ref_element_size (op0));
	}
      else
	{
	  /* Handle array-to-pointer decay as &a.  */
	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
	    *base = TREE_OPERAND (expr, 0);
	  else
	    *base = expr;
	  *offset = NULL_TREE;
	}
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
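/* Illustrative example (editorial addition): for EXPR == &a[i] the
   ADDR_EXPR arm fires, setting *base to 'a' and *offset to
   i * sizeof (a[0]); for EXPR == p, a plain pointer variable, the
   SSA_VAR_P arm sets *base = p and *offset = NULL_TREE.  */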
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
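/* Illustrative worked example (editorial addition): for
   "(b ? 3 : 5) + 1", ARG is the constant 1, so the operation is pushed
   into both arms and the result is "b ? 4 : 6" -- each arm folds to a
   constant, which is exactly the simplification this transformation
   is after.  */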
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
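/* Editorial sketch, not part of GCC: a standalone illustration of why
   X + 0.0 cannot be folded to X when signed zeros are honored.  For
   X == -0.0 the addition changes the sign of the zero under the default
   rounding mode, while X - 0.0 preserves it.  */
#if 0
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  printf ("%d\n", signbit (x + 0.0) != 0);	/* 0: -0.0 + 0.0 is +0.0 */
  printf ("%d\n", signbit (x - 0.0) != 0);	/* 1: -0.0 - 0.0 stays -0.0 */
  return 0;
}
#endif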
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
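/* Illustrative worked example (editorial addition): when NaNs are not
   honored, "sqrt (x) > 2.0" folds to "x > 4.0" by the c*c path above;
   if c is so large that c*c overflows to +Inf, the comparison instead
   becomes "x == +Inf" or constant false, depending on whether
   infinities are honored.  */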
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, unless we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
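/* Illustrative worked example (editorial addition): for double,
   "x < +Inf" becomes "x <= DBL_MAX" and "x >= +Inf" becomes
   "x > DBL_MAX"; a comparison against -Inf is first turned into the
   mirror-image comparison by the swap at the top.  */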
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &lpart, &hpart, unsigned_p);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &lpart, &hpart, unsigned_p);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
                           TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand (type, tmp, arg00);
        }
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
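
/* Worked example (editorial): for signed X, "X/3 == 2" gives
   prod = 6, tmp = 2, lo = 6 and hi = 8, so the EQ_EXPR arm above
   emits build_range_check (type, X, 1, 6, 8), i.e. the test that X
   lies in [6, 8] -- exactly the values whose truncating division
   by 3 yields 2.  */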
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
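
/* Example (editorial): for a 32-bit int A, "(A & 0x80000000) != 0"
   masks exactly the sign bit, so it folds to "(int) A < 0", and
   "(A & 0x80000000) == 0" folds to "(int) A >= 0".  */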
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
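
/* Example (editorial): with the operations done unsigned,
   "(A & 8) != 0" has bitnum = 3 and becomes "((unsigned) A >> 3) & 1";
   for the EQ_EXPR form the intermediate value is XORed with 1 before
   the final AND, giving "(((unsigned) A >> 3) ^ 1) & 1".  */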
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
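
/* Example (editorial): for "0 > x" this predicate returns true (ARG0
   is an INTEGER_CST), so callers canonicalize to "x < 0"; constants
   sort last, then other TREE_CONSTANT nodes, and between two SSA
   names the one with the higher version number is placed second.  */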
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);

    default:
      break;
    }

  return NULL_TREE;
}
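
/* Example (editorial): for unsigned char UC widened as
   "(int) UC < 300", 300 does not fit in the shorter type (max 255),
   so ABOVE is true and the LT_EXPR arm folds the whole comparison
   to 1 (always true).  */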
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
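
/* Example (editorial): for unsigned int U, "(int) U == 5" differs
   from the inner type only in signedness, so it folds to "U == 5U"
   (the constant is refitted into the inner type); an ordering test
   such as "(int) U < 5" is rejected above because a sign change does
   affect ordering comparisons.  */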
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat op1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
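
/* Example (editorial): for "int a[N];" on a target with 4-byte int,
   the element size is 4, so "&a[i] + d * 4" (PLUS_EXPR) matches
   S == step and is rewritten to "&a[i + d]"; a bare constant such as
   "&a[i] + 8" goes through the EXACT_DIV path, giving "&a[i + 2]".  */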
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
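
/* Worked examples (editorial): "X*8 - X" is treated as "X*8 - X*1",
   so SAME = X and the result is "(8 - 1) * X"; "A*12 + B*4" has no
   common multiplicand, but 4 is a power of two dividing 12, so the
   power-of-two branch rewrites it as "(A*3 + B) * 4".  */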
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
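
/* Layout example (editorial, assuming 8-bit bytes and 4-byte words):
   encoding the 32-bit INTEGER_CST 0x01020304 stores {0x04, 0x03,
   0x02, 0x01} on a little-endian target and {0x01, 0x02, 0x03, 0x04}
   on a big-endian one, matching the target's in-memory layout.  */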
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (FLOAT_WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

static int
native_encode_expr (tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        hi |= (unsigned HOST_WIDE_INT) value
              << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return force_fit_type (build_int_cst_wide (type, lo, hi),
                         0, false, false);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (FLOAT_WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
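
/* Example (editorial): on an IEEE single-precision target,
   VIEW_CONVERT_EXPR<float>(0x3f800000) encodes the integer constant
   into the buffer and reinterprets the same bytes as a REAL_CST,
   yielding 1.0f; if any step fails (e.g. an unsupported type),
   NULL_TREE is returned and the conversion is not folded.  */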
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ, or
             - the final type is a pointer type and the initial type is not, or
             - the initial type is a pointer to an array and the final type
               is not.  */
          /* Java pointer type conversions generate checks in some
             cases, so we explicitly disallow this optimization.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && final_ptr == inside_ptr
              && ! (inside_ptr
                    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
                    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
              && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
                    && final_ptr))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }
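
      /* Examples (editorial): "(unsigned char)(unsigned short) X" for
         unsigned int X drops the middle conversion and folds to
         "(unsigned char) X", while "(int)(unsigned char) X" is kept,
         because there the intermediate type is narrower than both the
         initial and final types and thus really truncates.  */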
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, build_fold_addr_expr (base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
                                        TREE_INT_CST_HIGH (and1));
              tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
                                    TREE_CONSTANT_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }
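
      /* Example (editorial): for signed char C, "(int)(C & 0x7f)" folds
         to "(int) C & 0x7f" -- the mask cannot contain C's sign bit, so
         extending before masking is safe.  */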
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
        return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
          return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert (itype, TREE_REALPART (arg0));
          tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (REALPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, tem);
        }
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2 (TREE_CODE (arg0), itype,
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 0)),
                             fold_build1 (IMAGPART_EXPR, itype,
                                          TREE_OPERAND (arg0, 1)));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert (type, negate_expr (tem));
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    return NULL_TREE;

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should use fold_binary instead.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && !TYPE_UNSIGNED (TREE_TYPE (arg1))
          && !(flag_wrapv || flag_trapv))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
                         TREE_TYPE (arg1), const2, const1);
      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        return fold_build2 (code, type, variable, lhs);
    }
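
  /* Examples (editorial, signed operands, no -fwrapv/-ftrapv):
     "x + 2 < 5" folds to "x < 3" and "x - 1 == 7" to "x == 8"; the
     rewritten constant is checked for overflow before the new
     comparison is built.  */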
  /* If this is a comparison of two exprs that look like an ARRAY_REF of the
     same object, then we can fold this to a comparison of the two offsets in
     signed size type.  This is possible because pointer arithmetic is
     restricted to remain within an object and overflow on pointer differences
     is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && !flag_wrapv && !flag_trapv)
    {
      tree base0, offset0, base1, offset1;

      if (extract_array_ref (arg0, &base0, &offset0)
          && extract_array_ref (arg1, &base1, &offset1)
          && operand_equal_p (base0, base1, 0))
        {
          tree signed_size_type_node;
          signed_size_type_node = signed_type_for (size_type_node);

          /* By converting to signed size type we cover middle-end pointer
             arithmetic which operates on unsigned pointer types of size
             type size and ARRAY_REF offsets which are properly sign or
             zero extended from their type in case it is narrower than
             size type.  */
          if (offset0 == NULL_TREE)
            offset0 = build_int_cst (signed_size_type_node, 0);
          else
            offset0 = fold_convert (signed_size_type_node, offset0);
          if (offset1 == NULL_TREE)
            offset1 = build_int_cst (signed_size_type_node, 0);
          else
            offset1 = fold_convert (signed_size_type_node, offset1);

          return fold_build2 (code, type, offset0, offset1);
        }
    }
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2 (code, type, fold_convert (newtype, targ0),
                            fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
                            TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2 (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 0),
                                build_real (TREE_TYPE (arg1),
                                            REAL_VALUE_NEGATE (cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2 (code, type, arg0,
                                build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand (type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst))
            {
              tem = fold_inf_compare (code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (swap_tree_comparison (code), type,
                            TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
  /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
  if (TREE_CONSTANT (arg1)
      && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
          || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
      /* This optimization is invalid for ordered comparisons
         if CONST+INCR overflows or if foo+incr might overflow.
         This optimization is invalid for floating point due to rounding.
         For pointer types we assume overflow doesn't happen.  */
      && (POINTER_TYPE_P (TREE_TYPE (arg0))
          || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (code == EQ_EXPR || code == NE_EXPR))))
    {
      tree varop, newconst;

      if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
        {
          newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
                                  arg1, TREE_OPERAND (arg0, 1));
          varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
                          TREE_OPERAND (arg0, 0),
                          TREE_OPERAND (arg0, 1));
        }
      else
        {
          newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
                                  arg1, TREE_OPERAND (arg0, 1));
          varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
                          TREE_OPERAND (arg0, 0),
                          TREE_OPERAND (arg0, 1));
        }

      /* If VAROP is a reference to a bitfield, we must mask
         the constant by the width of the field.  */
      if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
          && host_integerp (DECL_SIZE (TREE_OPERAND
                                       (TREE_OPERAND (varop, 0), 1)), 1))
        {
          tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
          HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
          tree folded_compare, shift;

          /* First check whether the comparison would come out
             always the same.  If we don't do that we would
             change the meaning with the masking.  */
          folded_compare = fold_build2 (code, type,
                                        TREE_OPERAND (varop, 0), arg1);
          if (TREE_CODE (folded_compare) == INTEGER_CST)
            return omit_one_operand (type, folded_compare, varop);

          shift = build_int_cst (NULL_TREE,
                                 TYPE_PRECISION (TREE_TYPE (varop)) - size);
          shift = fold_convert (TREE_TYPE (varop), shift);
          newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
                                  newconst, shift);
          newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
                                  newconst, shift);
        }

      return fold_build2 (code, type, varop, newconst);
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
          || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
        return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2 (EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
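  /* For example, x <= x (and x >= x) is rewritten as x == x when NaNs
     are honored: both forms are true for every value except a NaN, for
     which both are false.  */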
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, minval),
                           arg1);
          tree equal_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, maxval),
                           arg1);
          tree low_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, minval,
                                       cval2, maxval),
                           arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand (type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand (type, integer_one_node, arg0);
                }

              if (save_p)
                return save_expr (build2 (code, type, cval1, cval2));
              return fold_build2 (code, type, cval1, cval2);
            }
        }
    }
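  /* Example of the 3-bit selector above: results high = 1, equal = 0,
     low = 1 give the mask 4 + 0 + 1 = 5, i.e. "true when greater or
     when less", which selects NE_EXPR.  */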
  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y.  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
        {
          tree op0 = TREE_OPERAND (cref0, 0);
          tree op1 = TREE_OPERAND (cref1, 0);
          return fold_build2 (code, type,
                              build_fold_addr_expr (op0),
                              build_fold_addr_expr (op1));
        }
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
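  /* For instance, with truncating integer division, X/3 == 2 holds
     exactly for 6 <= X <= 8, so fold_div_compare can rewrite the
     division-based comparison as a range test on X.  */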
  return NULL_TREE;
}

/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1 (REALPART_EXPR, itype, expr);
      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2 (PLUS_EXPR, itype,
                     fold_build2 (MULT_EXPR, itype, rpart, rpart),
                     fold_build2 (MULT_EXPR, itype, ipart, ipart));
  return fold_build2 (COMPLEX_EXPR, type, tem,
                      fold_convert (itype, integer_zero_node));
}
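/* Note: for z = a + b*i, z * conj(z) is (a + b*i) * (a - b*i)
   = a*a + b*b, with a zero imaginary part, which is the identity the
   builder above encodes.  */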
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;
  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        tem = const_binop (code, arg0, arg1, 0);
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }
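  /* For example, (a < b) & (c < d) becomes the boolean
     (a < b) && (c < d), and (a < b) == (c < d) becomes the inverted
     TRUTH_XOR_EXPR !((a < b) ^ (c < d)).  */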
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build2 (code, type,
                                    TREE_OPERAND (arg0, 1), op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold_build2 (code, type,
                                    op0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce
             more simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }
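          /* For instance, (X & 0xF0) + (Y & 0x0F) can never produce a
             carry between the two masked halves, so it is equivalent
             to (X & 0xF0) | (Y & 0x0F).  */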
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }
          /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
             step of the array.  The loop optimizer sometimes produces
             this kind of expression.  */
          if (TREE_CODE (arg0) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
              if (tem)
                return fold_convert (type, tem);
            }
          else if (TREE_CODE (arg1) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
              if (tem)
                return fold_convert (type, tem);
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR
                  || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR
                  || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));
          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
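      /* For a 32-bit unsigned X, (X << 3) + (X >> 29) matches the first
         pattern (3 + 29 == 32) and becomes X rotated left by 3;
         (X << n) + (X >> (32 - n)) matches the second and becomes a
         rotate by n.  */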
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1,
                                            code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal
                 is greater than the positive part.  Otherwise, the
                 multiplicative folding code (i.e. extract_muldiv) may be
                 fooled in case unsigned constants are subtracted, like in
                 the following example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            code, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

      return NULL_TREE;
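      /* Example of the association above: (x + 4) + (y + 6) splits into
         variables {x, y} and literals {4, 6} and is rebuilt as
         (x + y) + 10, exposing the combined literal to later folds.  */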
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1))
        return fold_build1 (BIT_NOT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, arg1);
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 0)),
                                    arg0);
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 1)),
                                    arg0);
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }
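      /* Example of the last transform: with B == 7 (a power of 2 minus 1)
         and A == 5, (A & ~B) - (A & B) = 0 - 5 = -5 and
         (A ^ B) - B = 2 - 7 = -5, as required.  */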
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }
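      /* In effect, the difference of &a[i] and &a[j] is folded to
         (i - j) scaled by the element size of the array, matching the
         byte offset between the two addresses.  */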
      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
         of the array.  The loop optimizer sometimes produces this kind
         of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
          if (tem)
            return fold_convert (type, tem);
        }

      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (MULT_EXPR, type,
                            fold_convert (type, negate_expr (arg0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));
          /* Transform x * -1 into -x.  */
          if (integer_all_onesp (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2 (LSHIFT_EXPR, type, arg0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2 (LSHIFT_EXPR, type, arg1,
                                TREE_OPERAND (arg0, 1));

          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0,
                                             fold_convert (type, arg1),
                                             code, NULL_TREE)))
            return fold_convert (type, tem);

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert (type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold_build2 (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert (type, tem);
                  return fold_build2 (MULT_EXPR, type, tem, tem);
                }
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg, arglist;
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                  arglist = build_tree_list (NULL_TREE, arg);
                  return build_function_call_expr (rootfn, arglist);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  tree arg = fold_build2 (PLUS_EXPR, type,
                                          TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                          TREE_VALUE (TREE_OPERAND (arg1, 1)));
                  tree arglist = build_tree_list (NULL_TREE, arg);
                  return build_function_call_expr (expfn, arglist);
                }
              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
                      tree arglist = tree_cons (NULL_TREE, arg,
                                                build_tree_list (NULL_TREE,
                                                                 arg01));
                      return build_function_call_expr (powfn, arglist);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
                      tree arglist = tree_cons (NULL_TREE, arg00,
                                                build_tree_list (NULL_TREE,
                                                                 arg));
                      return build_function_call_expr (powfn, arglist);
                    }
                }
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_function_call_expr (sinfn,
                                                     TREE_OPERAND (arg0, 1));
                }
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));
                  if (TREE_CODE (arg11) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  if (TREE_CODE (arg01) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg1, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (! optimize_size
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      tree arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
          int width = TYPE_PRECISION (type);
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2 (BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
          hi1 &= mhi;
          lo1 &= mlo;
          if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                fold_build2 (BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo1 & ~lo2,
                                                                 hi1 & ~hi2)),
                                arg1);
        }
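      /* Worked example: in (X & 0xFF) | 0x0F, neither early exit applies,
         but the low four bits of C1 are overwritten by C2, so the result
         is canonicalized to (X & 0xF0) | 0x0F.  */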
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0)));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_int_cst (type, -1);
          t1 = force_fit_type (t1, 0, false, false);
          return omit_one_operand (type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce
         more simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
                            fold_convert (type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2 (code, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2 (EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (BIT_IOR_EXPR, type,
                            fold_build2 (BIT_AND_EXPR, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (BIT_AND_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2 (EQ_EXPR, type,
                              fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert (type, TREE_OPERAND (arg0, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 1));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_convert (type, arg0),
                              fold_build1 (BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert (type, TREE_OPERAND (arg1, 0));
          return fold_build2 (BIT_AND_EXPR, type,
                              fold_build1 (BIT_NOT_EXPR, type, tem),
                              fold_convert (type, arg0));
        }

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0)));
        }

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands (type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands (type, r, arg0, arg1);
            }
        }
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2 (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2 (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold_build2 (MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2 (MULT_EXPR, type,
                                      fold_convert (type, arg0), tem);
                }
            }
        }
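      /* E.g. when optimizing, x / 2.0 becomes x * 0.5 because the
         reciprocal is exact; x / 10.0 becomes x * 0.1 only under
         -funsafe-math-optimizations, since 0.1 is not exactly
         representable and the rounding differs.  */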
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2 (MULT_EXPR, type,
                            fold_build2 (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold_build2 (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_function_call_expr (tanfn,
                                                 TREE_OPERAND (arg0, 1));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = TREE_OPERAND (arg0, 1);
                  tmp = build_function_call_expr (tanfn, tmp);
                  return fold_build2 (RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }
          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_function_call_expr (cosfn,
                                                     TREE_OPERAND (arg0, 1));
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = TREE_OPERAND (arg0, 1);
                      tmp = build_function_call_expr (cosfn, tmp);
                      return fold_build2 (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
              if (TREE_CODE (arg01) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
              tree arglist = build_tree_list (NULL_TREE,
                                              fold_convert (type, arg));
              arg1 = build_function_call_expr (expfn, arglist);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
              tree neg11 = fold_convert (type, negate_expr (arg11));
              tree arglist = tree_cons (NULL_TREE, arg10,
                                        build_tree_list (NULL_TREE, neg11));
              arg1 = build_function_call_expr (powfn, arglist);
              return fold_build2 (MULT_EXPR, type, arg0, arg1);
            }
        }

      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

              sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt, build_int_cst (NULL_TREE, pow2));
              return fold_build2 (RSHIFT_EXPR, type,
                                  fold_convert (type, arg0), sh_cnt);
            }
        }
:
9797 case EXACT_DIV_EXPR
:
9798 if (integer_onep (arg1
))
9799 return non_lvalue (fold_convert (type
, arg0
));
9800 if (integer_zerop (arg1
))
9803 if (!TYPE_UNSIGNED (type
)
9804 && TREE_CODE (arg1
) == INTEGER_CST
9805 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
9806 && TREE_INT_CST_HIGH (arg1
) == -1)
9807 return fold_convert (type
, negate_expr (arg0
));
9809 /* Convert -A / -B to A / B when the type is signed and overflow is
9811 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
9812 && TREE_CODE (arg0
) == NEGATE_EXPR
9813 && negate_expr_p (arg1
))
9814 return fold_build2 (code
, type
, TREE_OPERAND (arg0
, 0),
9815 negate_expr (arg1
));
9816 if (!TYPE_UNSIGNED (type
) && !flag_wrapv
9817 && TREE_CODE (arg1
) == NEGATE_EXPR
9818 && negate_expr_p (arg0
))
9819 return fold_build2 (code
, type
, negate_expr (arg0
),
9820 TREE_OPERAND (arg1
, 0));
9822 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9823 operation, EXACT_DIV_EXPR.
9825 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9826 At one time others generated faster code, it's not clear if they do
9827 after the last round to changes to the DIV code in expmed.c. */
9828 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
9829 && multiple_of_p (type
, arg0
, arg1
))
9830 return fold_build2 (EXACT_DIV_EXPR
, type
, arg0
, arg1
);
9832 if (TREE_CODE (arg1
) == INTEGER_CST
9833 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
)))
9834 return fold_convert (type
, tem
);
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N) where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
                                       arg1, integer_one_node);
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, arg0),
                                  fold_convert (type, mask));
            }
        }
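      /* E.g. for unsigned x, x % 16 becomes x & 15, and
         x % (4 << n) becomes x & ((4 << n) - 1): when the divisor is a
         power of two the remainder is just the low-order bits.  */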
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_CONSTANT_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !flag_trapv
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !flag_trapv)
        return fold_build2 (code, type, fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
        return fold_convert (type, tem);

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return build_int_cst (type, 0);
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
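      /* E.g. (x << 3) << 5 becomes x << 8.  When the combined count
         reaches the precision, rotates wrap around (the count is reduced
         modulo the precision) while shifts degenerate to 0 or to a sign
         smear, as handled above.  */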
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);

          if (low0 == low1)
            {
              tree arg00;
              tree lshift;

              arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1, 0);

              return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
            }
        }
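      /* E.g. for a 32-bit unsigned x, (x >> 4) << 4 becomes
         x & (-1 << 4), i.e. x & 0xfffffff0: shifting down and back up by
         the same count just clears the low bits.  */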
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (NULL_TREE,
                                    GET_MODE_BITSIZE (TYPE_MODE (type)));
          tem = fold_convert (TREE_TYPE (arg1), tem);
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, arg0, tem);
        }
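      /* E.g. on a 32-bit type, rotate-left by 8 becomes rotate-right
         by 24.  Canonicalizing every constant rotate to RROTATE_EXPR means
         the transformations below only have to match one form.  */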
      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, arg1, arg0);
      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2 (code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2 (code, type, arg0, tem);
        }
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a11));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type, a00,
                                fold_build2 (code, type, a01, a10));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold_build2 (TREE_CODE (arg0), type, a01,
                                fold_build2 (code, type, a00, a11));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold_build2 (TREE_CODE (arg0), type,
                                fold_build2 (code, type, a00, a10),
                                a01);
        }
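      /* E.g. (a || b) && (a || c) becomes a || (b && c): when the first
         operands match they are factored out, evaluating A once.  The
         side-effect checks above ensure the shorter form still evaluates
         exactly the operands the original would have.  */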
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue (arg0);
          return non_lvalue (fold_convert (type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
      /* ~a != C becomes a != ~C where C is a constant.  Likewise for ==.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
                                         arg1));

      /* If this is an equality comparison of the address of a non-weak
         object against zero, then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && integer_zerop (arg1))
        return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert (TREE_TYPE (arg0), arg1),
                                      TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (arg1))
          && TREE_CODE (tem) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2 (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            return
              fold_build2 (code, type,
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
                                           arg01, TREE_OPERAND (arg00, 1)),
                                   fold_convert (TREE_TYPE (arg0),
                                                 integer_one_node)),
                           arg1);
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold_build2 (code, type,
                           build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
                                           arg00, TREE_OPERAND (arg01, 1)),
                                   fold_convert (TREE_TYPE (arg0),
                                                 integer_one_node)),
                           arg1);
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
          tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 0)),
                                     fold_convert (newtype,
                                                   TREE_OPERAND (arg0, 1)));

          return fold_build2 (code, type, newmod,
                              fold_convert (newtype, arg1));
        }
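      /* E.g. (int) x % 4 == 0 becomes (unsigned) x % 4u == 0: whether a
         remainder is zero does not depend on its sign, and the unsigned
         form can be implemented with a simple mask.  */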
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2 (code, type, tem, arg1);
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                    arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand (type,
                                         code == EQ_EXPR ? integer_one_node
                                                         : integer_zero_node,
                                         arg000);
            }
        }
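      /* E.g. ((x >> 3) & 4) != 0 becomes (x & (4 << 3)) != 0, i.e.
         (x & 32) != 0, testing the bit before it is shifted rather than
         after.  The overflow check ensures the shifted mask still fits in
         the type.  */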
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert (TREE_TYPE (arg0),
                                                integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1 (BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       arg1, notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                                       TREE_OPERAND (arg0, 1), notd);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && lang_hooks.can_use_bit_fields_p ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            {
              tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
              return fold_build2 (code, type, iref,
                                  build_int_cst (TREE_TYPE (iref), 0));
            }
        }
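      /* E.g. strlen (p) == 0 becomes *p == 0: a string has length zero
         exactly when its first character is the terminating NUL, so the
         library call can be dropped entirely.  */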
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = lang_hooks.types.signed_type (itype);
                  arg00 = fold_convert (itype, arg00);
                }
              return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  type, arg00, build_int_cst (itype, 0));
            }
        }
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
                            build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
                            fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
                                         TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                             TREE_OPERAND (arg0, 1));
          return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                              type, tem, arg1);
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
                             arg000, TREE_OPERAND (arg0, 1));
          return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand (type, res, arg0);
        }

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
                  && !(flag_wrapv || flag_trapv))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (0, type);

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (0, type);

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (1, type);

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            return constant_boolean_node (1, type);

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                return constant_boolean_node (1, type);

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                return constant_boolean_node (1, type);

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                return constant_boolean_node (0, type);

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                return constant_boolean_node (0, type);
            }
        }
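      /* E.g. for signed x, x + 1 > x folds to true: with -fwrapv and
         -ftrapv excluded above, signed overflow is assumed not to happen,
         so adding a positive constant can only move X up.  The strict
         forms additionally require the constant to be nonzero.  */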
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          if (code == GE_EXPR)
            {
              arg1 = const_binop (MINUS_EXPR, arg1,
                                  build_int_cst (TREE_TYPE (arg1), 1), 0);
              return fold_build2 (GT_EXPR, type, arg0,
                                  fold_convert (TREE_TYPE (arg0), arg1));
            }
          if (code == LT_EXPR)
            {
              arg1 = const_binop (MINUS_EXPR, arg1,
                                  build_int_cst (TREE_TYPE (arg1), 1), 0);
              return fold_build2 (LE_EXPR, type, arg0,
                                  fold_convert (TREE_TYPE (arg0), arg1));
            }
        }
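      /* E.g. x >= 1 becomes x > 0 and x < 1 becomes x <= 0.  For
         integers the two forms are equivalent whenever C > 0, and the
         canonical form lets the constant-boundary checks below fire.  */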
      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);

                case LE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2 (NE_EXPR, type, arg0, arg1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);

                case GE_EXPR:
                  return omit_one_operand (type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2 (NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (NE_EXPR, type, arg0, arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold_build2 (EQ_EXPR, type, arg0, arg1);
                default:
                  break;
                }

            else if (!in_gimple_form
                     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
                    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
                    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                        type, fold_convert (st0, arg0),
                                        build_int_cst (st1, 0));
                  }
              }
          }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
                            build2 (GE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), tem),
                            build2 (LE_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      if (code == GE_EXPR
          && tree_expr_nonnegative_p (arg0)
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1))))
        return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      if (code == LT_EXPR
          && tree_expr_nonnegative_p (arg0)
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return omit_one_operand (type, integer_zero_node, arg0);
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                               TREE_OPERAND (arg1, 1)),
                       build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg1) == NOP_EXPR
              || TREE_CODE (arg1) == CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                  fold_convert (TREE_TYPE (arg0),
                                build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                        TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                      1))),
                  build_int_cst (TREE_TYPE (arg0), 0));

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand (type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2 (code, type, fold_convert (newtype, targ0),
                              fold_convert (newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for a LABEL_EXPR.  Return *TP if it
   is a LABEL_EXPR, and NULL_TREE otherwise.  Do not check the sub-tree
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
                  int *walk_subtrees,
                  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no break */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label (LABEL_EXPR) which is
   accessible from outside the sub-tree.  Returns true if such a label
   is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
}
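/* Note that because the callback clears *walk_subtrees at GOTO_EXPR, a
   mere reference to a label (a "goto l;" inside ST) does not count; only
   a LABEL_EXPR defining a label inside ST does, since that is what could
   still be jumped to from outside once the sub-tree is discarded.  */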
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
                                    field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue (tem);
          return NULL_TREE;
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          tem = fold_truth_not_expr (arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (arg0);
          if (tem)
            return fold_build3 (code, type, tem, op2, op1);
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
                  tem = fold_convert (tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
                  tem = fold_convert (tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return fold_convert (type,
                                 fold_build2 (BIT_AND_EXPR,
                                              TREE_TYPE (tem), tem,
                                              fold_convert (TREE_TYPE (tem),
                                                            arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2 (BIT_AND_EXPR, type,
                                TREE_OPERAND (tem, 0), arg1);
        }

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2 (TRUTH_ANDIF_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, arg1));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (arg0);
          if (tem)
            return fold_build2 (TRUTH_ORIF_EXPR, type,
                                fold_convert (type, tem),
                                fold_convert (type, arg1));
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (arg0);
          if (tem)
            return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                fold_convert (type, tem),
                                fold_convert (type, op2));
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2 (TRUTH_ORIF_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, op2));

      return NULL_TREE;
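      /* For instance, with boolean a and b: "a ? b : 0" becomes "a && b",
         "a ? b : 1" becomes "!a || b", "a ? 0 : b" becomes "!a && b" and
         "a ? 1 : b" becomes "a || b"; the short-circuit forms preserve
         the conditional evaluation of B.  */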
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
          && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
        return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;

    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
          && type == TREE_TYPE (TREE_TYPE (arg0))
          && host_integerp (arg1, 1)
          && host_integerp (op2, 1))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (width != 0
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
              && (idx % width) == 0
              && (idx = idx / width)
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              tree elements = TREE_VECTOR_CST_ELTS (arg0);
              while (idx-- > 0 && elements)
                elements = TREE_CHAIN (elements);
              if (elements)
                return TREE_VALUE (elements);
              else
                return fold_convert (type, integer_zero_node);
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary (code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary (code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary (code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
               <= sizeof (struct tree_function_decl))
              && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
        {
          TYPE_CACHED_VALUES_P (expr) = 0;
          TYPE_CACHED_VALUES (expr) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
                  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
                         tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
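
/* Illustrative usage sketch (not from the GCC sources): a front end
   folding a static initializer such as "double d = 1.0 / 3.0;" can call

     tree val = fold_build2_initializer (RDIV_EXPR, double_type_node,
                                         one, three);

   with ONE and THREE the REAL_CST operands.  The START_FOLD_INIT /
   END_FOLD_INIT pair temporarily clears the trapping, rounding and
   signaling-NaN flags, so the division is folded even under
   -ftrapping-math, then restores them.  */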
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
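
/* Illustrative sketch (not from the GCC sources): for the tree built
   from "i * (j * 8)" with TOP the outer MULT_EXPR, one would expect

     multiple_of_p (sizetype, top, size_int (8))   == 1
     multiple_of_p (sizetype, top, size_int (16))  == 0

   The first succeeds because the MULT_EXPR case recurses into the inner
   multiply, whose INTEGER_CST operand 8 satisfies 8 % 8 == 0 in the
   TRUNC_MOD_EXPR check; the second fails conservatively, since nothing
   proves I or J even.  */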
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
        return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
            CASE_INT_FN (BUILT_IN_FFS):
            CASE_INT_FN (BUILT_IN_PARITY):
            CASE_INT_FN (BUILT_IN_POPCOUNT):
              /* Always true.  */
              return 1;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_ASINH):
            CASE_FLT_FN (BUILT_IN_ATAN):
            CASE_FLT_FN (BUILT_IN_ATANH):
            CASE_FLT_FN (BUILT_IN_CBRT):
            CASE_FLT_FN (BUILT_IN_CEIL):
            CASE_FLT_FN (BUILT_IN_ERF):
            CASE_FLT_FN (BUILT_IN_EXPM1):
            CASE_FLT_FN (BUILT_IN_FLOOR):
            CASE_FLT_FN (BUILT_IN_FMOD):
            CASE_FLT_FN (BUILT_IN_FREXP):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LDEXP):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
            CASE_FLT_FN (BUILT_IN_LLRINT):
            CASE_FLT_FN (BUILT_IN_LLROUND):
            CASE_FLT_FN (BUILT_IN_LRINT):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_MODF):
            CASE_FLT_FN (BUILT_IN_NEARBYINT):
            CASE_FLT_FN (BUILT_IN_POW):
            CASE_FLT_FN (BUILT_IN_RINT):
            CASE_FLT_FN (BUILT_IN_ROUND):
            CASE_FLT_FN (BUILT_IN_SIGNBIT):
            CASE_FLT_FN (BUILT_IN_SINH):
            CASE_FLT_FN (BUILT_IN_TANH):
            CASE_FLT_FN (BUILT_IN_TRUNC):
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_FMAX):
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_FMIN):
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_COPYSIGN):
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
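
/* Illustrative sketch (not from the GCC sources): expected answers for a
   few trees, with I a signed int variable:

     tree_expr_nonnegative_p (build_int_cst (integer_type_node, 5))  == 1
     tree_expr_nonnegative_p (any tree of unsigned type)             == 1
     tree_expr_nonnegative_p (ABS_EXPR <I>)   == 1  (unless flag_wrapv)
     tree_expr_nonnegative_p (I)              == 0  (conservative default)

   A "0" answer only means the sign could not be proved, not that the
   value is negative.  */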
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
         if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* With the presence of negative values it is hard
             to say something.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
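
/* Illustrative sketch (not from the GCC sources): with X a non-weak
   global variable, one would expect

     tree_expr_nonzero_p (build_fold_addr_expr (x_decl))  == true
     tree_expr_nonzero_p (integer_zero_node)              == false

   since &x cannot be null unless the declaration is weak, while the
   INTEGER_CST case simply tests both HOST_WIDE_INT halves.  */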
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
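
/* Illustrative sketch (not from the GCC sources): folding 2 + 3 this way
   yields an INTEGER_CST, while X + 0 simplifies to the non-constant X and
   therefore comes back as NULL_TREE:

     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       build_int_cst (integer_type_node, 2),
                                       build_int_cst (integer_type_node, 3));
     -- c is the INTEGER_CST 5; with a VAR_DECL operand it would be
        NULL_TREE.  */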
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
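
/* Illustrative sketch (not from the GCC sources): for the ARRAY_REF tree
   corresponding to "abc"[1], the checks above all pass (single-byte
   integer mode, constant index within the string length), and the result
   is the character constant 'b' converted to the type of EXP.  */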
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
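
/* Illustrative sketch (not from the GCC sources): negating the most
   negative INTEGER_CST wraps; for a 32-bit int, INT_MIN negates to
   itself, and force_fit_type then flags TREE_OVERFLOW on the result for
   signed types.  */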
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
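
/* Illustrative sketch (not from the GCC sources):

     fold_not_const (integer_zero_node, integer_type_node)

   complements both HOST_WIDE_INT halves of the zero, giving an
   INTEGER_CST of value -1 once force_fit_type truncates it to the
   precision of the type.  */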
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
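
/* Illustrative sketch (not from the GCC sources): evaluating 2 > 1 takes
   the swap path above --

     fold_relational_const (GT_EXPR, boolean_type_node,
                            build_int_cst (integer_type_node, 2),
                            build_int_cst (integer_type_node, 1))

   swaps the operands, turns GT_EXPR into LT_EXPR, decides 1 < 2 with
   INT_CST_LT, and returns boolean_true_node.  */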
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for an EXPR that has no side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return,
     or the right hand side of the modify expression inside the return.  If
     either has no side effects set, we don't need to wrap the expression in
     a cleanup point expression.  Note we don't check the left hand side of
     the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
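
/* Illustrative sketch (not from the GCC sources): for a power-of-two
   divisor the bit-manipulation path reduces to

     round_up (value, 16)  ==>  (value + 15) & -16

   so a size of 20 bytes rounds up to 32, while a non-power-of-two
   divisor such as 24 goes through CEIL_DIV_EXPR followed by
   MULT_EXPR.  */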
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
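
/* Illustrative sketch (not from the GCC sources): for E1 == &a[3] and
   E2 == &a[1] with 4-byte elements, both cores are &a, the bit positions
   are 96 and 32, and *DIFF becomes (96 - 32) / BITS_PER_UNIT == 8.
   Different bases, or a non-constant offset on one side only, make the
   function return false.  */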
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    default:
      break;
    }
  return NULL_TREE;
}