/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
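
/* Illustrative sketch (not from the GCC sources): a standalone check of
   the OVERFLOW_SUM_SIGN trick on ordinary ints, assuming two's complement
   wraparound, the same assumption this file makes.  Guarded by #if 0 so
   it is never built.  */
#if 0
#include <assert.h>
#include <limits.h>
static void
overflow_sum_sign_demo (void)
{
  int a = INT_MAX, b = 1;
  int sum = a + b;		/* wraps to INT_MIN on two's complement */
  /* a and b agree in sign but a and sum differ, so overflow is flagged.  */
  assert (OVERFLOW_SUM_SIGN (a, b, sum));
  /* Operands of opposite signs can never overflow.  */
  assert (!OVERFLOW_SUM_SIGN (a, -b, a - b));
}
#endif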
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
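
/* Illustrative sketch (not from the GCC sources): on a host where
   HOST_WIDE_INT is 64 bits, BASE is 2^32 and LOWPART/HIGHPART split a
   word into two base-2^32 "digits" satisfying
   x == LOWPART (x) + HIGHPART (x) * BASE.  A standalone analogue using
   uint64_t:  */
#if 0
#include <stdint.h>
#include <assert.h>
static void
halfword_digit_demo (void)
{
  uint64_t x = 0x123456789abcdef0ULL;
  uint64_t lo = x & ((1ULL << 32) - 1);	/* LOWPART analogue */
  uint64_t hi = x >> 32;		/* HIGHPART analogue */
  assert (lo + hi * (1ULL << 32) == x);	/* value = LOWPART + HIGHPART * BASE */
}
#endif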
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
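
/* Illustrative sketch (not from the GCC sources): encode and decode are
   inverses, so a doubleword constant survives the round trip through the
   4-digit form used by mul_double and div_and_round_double below.  */
#if 0
static void
encode_decode_demo (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, 0xdeadbeef, -1);	/* split into four halfword digits */
  decode (words, &low, &hi);		/* reassemble the doubleword */
  /* Here low == 0xdeadbeef and hi == -1 again.  */
}
#endif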
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
	CONST_OVERFLOWED is nonzero
	or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
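
/* Illustrative sketch (not from the GCC sources): the truncate-then-
   sign-extend dance performed above, shown for a single host word and an
   8-bit signed target type, with uint64_t standing in for
   unsigned HOST_WIDE_INT.  */
#if 0
#include <stdint.h>
static int64_t
fit_to_signed_8_bits (uint64_t low)
{
  unsigned int prec = 8;
  low &= ~((uint64_t) -1 << prec);		/* clear bits beyond the precision */
  if (low & ((uint64_t) 1 << (prec - 1)))	/* narrow sign bit set? */
    low |= (uint64_t) -1 << prec;		/* sign extend */
  return (int64_t) low;				/* 0xff becomes -1; 0x7f stays 127 */
}
#endif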
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
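
/* Illustrative sketch (not from the GCC sources): the carry detection used
   by add_double.  If the low-word sum wrapped around, it is smaller than
   either addend, so (l < l1) is exactly the carry into the high word.  */
#if 0
#include <stdint.h>
#include <assert.h>
static void
add_double_carry_demo (void)
{
  uint64_t l1 = UINT64_MAX, l2 = 1;
  uint64_t l = l1 + l2;		/* unsigned wrap to 0 */
  assert (l < l1);		/* wrap occurred, so carry 1 into the high word */
}
#endif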
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
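
/* Illustrative sketch (not from the GCC sources): the same schoolbook
   digit loop as mul_double, on base-256 digits so the bounds are easy to
   check.  Each partial product is at most 255 * 255 = 0xFE01; adding the
   previous digit and the carry keeps the accumulator within 32 bits.  */
#if 0
#include <stdint.h>
static void
digit_multiply_demo (const uint8_t a[4], const uint8_t b[4], uint8_t prod[8])
{
  int i, j;

  for (i = 0; i < 8; i++)
    prod[i] = 0;
  for (i = 0; i < 4; i++)
    {
      uint32_t carry = 0;
      for (j = 0; j < 4; j++)
	{
	  carry += (uint32_t) a[i] * b[j] + prod[i + j];
	  prod[i + j] = carry & 0xff;	/* LOWPART analogue */
	  carry >>= 8;			/* HIGHPART analogue */
	}
      prod[i + 4] = carry;
    }
}
#endif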
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
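
/* Illustrative sketch (not from the GCC sources): why the cross-word
   shifts above are written as two shifts, e.g.
   ">> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1".  Shifting a word by its
   full width is undefined in C, and when count == 0 the naive
   "lo >> (64 - count)" would do exactly that.  */
#if 0
#include <stdint.h>
static uint64_t
high_word_of_left_shift (uint64_t lo, uint64_t hi, unsigned int count)
{
  /* Valid for 0 <= count < 64: both shift amounts stay below 64.  */
  return (hi << count) | (lo >> (64 - count - 1) >> 1);
}
#endif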
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
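
/* Illustrative sketch (not from the GCC sources): the rounding fixups
   applied after the trial division above, on ordinary ints.  C's native
   division truncates toward zero; FLOOR_DIV_EXPR subtracts one when a
   negative quotient leaves a nonzero remainder, and CEIL_DIV_EXPR adds
   one in the mirrored positive case.  */
#if 0
static int
floor_div_demo (int num, int den)
{
  int quo = num / den;		/* truncating division */
  int rem = num % den;
  if (rem != 0 && ((num < 0) != (den < 0)))
    quo -= 1;			/* e.g. floor_div_demo (-7, 2) == -4, not -3 */
  return quo;
}
#endif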
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
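
/* Illustrative sketch (not from the GCC sources): the one bit pattern the
   test above rejects.  In an N-bit signed type only the minimum value,
   a lone sign bit equal to 1 << (prec - 1), has no representable
   negation.  */
#if 0
#include <stdint.h>
static int
may_negate_8_bits (int8_t v)
{
  uint8_t val = (uint8_t) v;		/* mask to the type's 8-bit precision */
  return val != (uint8_t) (1u << 7);	/* false only for -128 */
}
#endif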
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
			    build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
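
/* Illustrative sketch (not from the GCC sources, with a hypothetical
   input tree): how a decomposed PLUS_EXPR comes apart.  For IN = x + 5
   and CODE == PLUS_EXPR, the literal lands in *litp, nothing is merely
   TREE_CONSTANT so *conp stays null, and x is returned as the variable
   part; for IN = x - 5, the literal would land in *minus_litp instead.  */
#if 0
static void
split_tree_demo (tree x_plus_5)		/* hypothetical tree for "x + 5" */
{
  tree con, lit, minus_lit, var;

  var = split_tree (x_plus_5, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* var == x, lit == 5, con == NULL, minus_lit == NULL.  */
}
#endif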
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
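
/* Illustrative sketch (not from the GCC sources): folding 2 + 3 at
   compile time.  Both operands are INTEGER_CSTs of the same type; the
   result is an INTEGER_CST carrying the overflow flags computed above.  */
#if 0
static tree
fold_two_plus_three (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  return int_const_binop (PLUS_EXPR, two, three, 0);	/* INTEGER_CST 5 */
}
#endif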
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */

      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree t1, t2, real, imag;
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t1 = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	    t2 = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      {
		real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
	      }
	    else
	      {
		real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
		imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
		if (!real || !imag)
		  return NULL_TREE;
	      }

	    t = build_complex (type, real, imag);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return NULL_TREE;
}
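
/* Illustrative sketch (not from the GCC sources): the textbook formula
   the complex division above implements,

      (a+bi) / (c+di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),

   where t1 is the numerator's real part, t2 its imaginary part and
   magsquared the shared denominator.  On plain doubles:  */
#if 0
static void
complex_div_demo (double a, double b, double c, double d,
		  double *re, double *im)
{
  double magsquared = c * c + d * d;
  *re = (a * c + b * d) / magsquared;
  *im = (b * c - a * d) / magsquared;
}
#endif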
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same type integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
			  TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
		      /* Don't set the overflow when
			 converting a pointer  */
		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
		      (TREE_INT_CST_HIGH (arg1) < 0
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
		      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
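
/* Illustrative sketch (not from the GCC sources): why the bit-based
   encoding makes combining comparisons cheap.  With LT, EQ and GT as
   separate bits, "x <= y AND x >= y" reduces to a bitwise AND:
   (LT|EQ) & (GT|EQ) == EQ, i.e. x == y, which is exactly what
   combine_comparisons computes for TRUTH_AND_EXPR below.  */
#if 0
#include <assert.h>
static void
compcode_demo (void)
{
  assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);	 /* "<= and >=" is "==" */
  assert ((COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT); /* "< or >" is "<>" */
}
#endif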
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
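/* Worked example (illustrative): for

       (x < y) && (x >= y)

   we get lcompcode = COMPCODE_LT and rcompcode = COMPCODE_GE, so the AND
   gives COMPCODE_LT & COMPCODE_GE == COMPCODE_FALSE and the whole
   expression folds to constant false.  Likewise

       (x < y) || (x == y)

   gives COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, i.e. x <= y.  */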
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_CEIL_EXPR:
	case FIX_TRUNC_EXPR:
	case FIX_FLOOR_EXPR:
	case FIX_ROUND_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  */
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! OP_SAME (0))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
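/* Example of the OEP_ONLY_CONST subtlety documented above (illustrative):
   operand_equal_p distinguishes the REAL_CSTs -0.0 and 0.0 because it
   uses REAL_VALUES_IDENTICAL rather than numeric equality, even though
   -0.0 == 0.0 evaluates to true in IEEE arithmetic; conversely, two
   bit-identical NaN constants compare equal here although NaN != NaN
   numerically.  */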
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
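/* Example (illustrative): when fold simplifies

       f () * 0

   it cannot simply return 0, because the call must still be evaluated;
   omit_one_operand (type, integer_zero_node, call) therefore produces
   the equivalent of

       (f (), 0)

   whereas for a side-effect-free OMITTED the result is just 0, wrapped
   with non_lvalue as needed.  */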
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  if (code == ERROR_MARK)
	    return build1 (TRUTH_NOT_EXPR, type, arg);
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	break;

      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold_build2 (TREE_CODE (arg0), type, common,
		      fold_build2 (code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
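/* Example of why this transformation is unsafe (illustrative): rewriting

       a / c + b / c   ->   (a + b) / c

   saves a division, but each division and addition rounds separately in
   IEEE arithmetic, so the two forms need not produce bit-identical
   results; they can differ in the last ulp.  This is why the callers
   only try it under -funsafe-math-optimizations.  */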
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
		    int unsignedp)
{
  tree result;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert (type, inner);
    }

  result = build3 (BIT_FIELD_REF, type, inner,
		   size_int (bitsize), bitsize_int (bitpos));
  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (linner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask),
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (rinner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
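/* Worked example (illustrative sketch; field layout is hypothetical and
   target-dependent): given

       struct s { unsigned a : 3; unsigned b : 9; } x;
       ... x.b == 5 ...

   with B occupying bits 3..11 of a word, instead of extracting X.b with
   a shift and mask, the comparison can test the field in place, roughly

       (WORD & MASK) == ((5 << 3) & MASK)

   where MASK covers bits 3..11, so the shift implicit in the bit-field
   extraction disappears.  */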
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
  tmask = force_fit_type (tmask, 0, false, false);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
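/* For instance (illustrative), the test X >= 2 && X <= 5 is built from
   the two ranges

       + [2, -]   and   + [-, 5]

   which merge_ranges combines into + [2, 5]; build_range_check then
   turns + [2, 5] into the single unsigned comparison
   (unsigned) (X - 2) <= 3, since subtracting the low bound maps every
   in-range value, and only those, into [0, 3].  */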
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_CODE_LENGTH (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type = lang_hooks.types.type_for_mode
		(TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     fold_convert (arg0_type,
							   integer_one_node));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = lang_hooks.types.unsigned_type (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = lang_hooks.types.signed_type (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
      && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
	{
	case INTEGER_TYPE:
	  /* There is no requirement that LOW be within the range of ETYPE
	     if the latter is a subtype.  It must, however, be within the base
	     type of ETYPE.  So be sure we do the subtraction in that type.  */
	  if (TREE_TYPE (etype))
	    etype = TREE_TYPE (etype);
	  utype = lang_hooks.types.unsigned_type (etype);
	  maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
	  maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			      integer_one_node, 1);
	  minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
	  if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
					  minv, 1, maxv, 1)))
	    {
	      etype = utype;
	      high = fold_convert (etype, high);
	      low = fold_convert (etype, low);
	      exp = fold_convert (etype, exp);
	      value = const_binop (MINUS_EXPR, high, low, 0);
	    }
	  break;
	default:
	  break;
	}
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    {
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
	{
	  etype = TREE_TYPE (etype);
	  exp = fold_convert (etype, exp);
	  low = fold_convert (etype, low);
	  value = fold_convert (etype, value);
	}

      return build_range_check (type,
				fold_build2 (MINUS_EXPR, etype, exp, low),
				1, build_int_cst (etype, 0), value);
    }

  return 0;
}
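/* Example (illustrative): for in_p = 1, low = 2, high = 5 and an EXP of
   signed int type, the code above rewrites the test as a subtraction of
   the low bound followed by a recursive call with a zero low bound; the
   zero-low-bound case then switches to the unsigned type, producing

       (unsigned int) (EXP - 2) <= 3

   which implements both of the original comparisons with one test.  */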
4127 /* Given two ranges, see if we can merge them into one. Return 1 if we
4128 can, 0 if we can't. Set the output range into the specified parameters. */
4131 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4132 tree high0
, int in1_p
, tree low1
, tree high1
)
4140 int lowequal
= ((low0
== 0 && low1
== 0)
4141 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4142 low0
, 0, low1
, 0)));
4143 int highequal
= ((high0
== 0 && high1
== 0)
4144 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4145 high0
, 1, high1
, 1)));
4147 /* Make range 0 be the range that starts first, or ends last if they
4148 start at the same value. Swap them if it isn't. */
4149 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4152 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4153 high1
, 1, high0
, 1))))
4155 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4156 tem
= low0
, low0
= low1
, low1
= tem
;
4157 tem
= high0
, high0
= high1
, high1
= tem
;
4160 /* Now flag two cases, whether the ranges are disjoint or whether the
4161 second range is totally subsumed in the first. Note that the tests
4162 below are simplified by the ones above. */
4163 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4164 high0
, 1, low1
, 0));
4165 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4166 high1
, 1, high0
, 1));
4168 /* We now have four cases, depending on whether we are including or
excluding the two ranges.  */

  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might also be adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
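
/* An illustrative sketch (never called by the compiler) of the interval
   arithmetic above, for the simplest case: both ranges finite and both
   "in" ranges, combined under AND.  The merged range is the intersection;
   an empty intersection corresponds to the "result is false" case.  */

static int
merge_in_in_sketch (int low0, int high0, int low1, int high1,
                    int *low, int *high)
{
  int tem;

  /* Make range 0 the one that starts first, as the code above does.  */
  if (low0 > low1 || (low0 == low1 && high1 > high0))
    {
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  if (high0 < low1)
    return 0;                           /* No overlap: always false.  */

  *low = low1;                          /* Start of the second range ...  */
  *high = high1 <= high0 ? high1 : high0;  /* ... to the earlier end.  */
  return 1;
}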
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert (arg1_type, arg1);
        return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert (type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || strcmp (lang_hooks.name, "GNU C++") != 0
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert (type, arg01);
        return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;
      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
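
/* A sketch of the A >= 0 ? A : -A case above on a host integer, where
   there are no signed zeros and the rewrite is unconditional: the
   conditional expression is exactly abs.  (For INT_MIN the negation
   overflows in C, just as the tree-level ABS_EXPR inherits the type's
   overflow rules.)  Illustrative only, not called by the compiler.  */

static int
abs_sketch (int a)
{
  return a >= 0 ? a : -a;       /* What the code above folds to ABS_EXPR <a>.  */
}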
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2 (code == TRUTH_ANDIF_EXPR
                       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build2 (code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, lhs, rhs);
        }
    }

  return 0;
}
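
/* A sketch of the payoff of the merge above: the two comparisons in
   "ch >= '0' && ch <= '9'" collapse into the single range + ['0', '9'],
   which build_range_check can then test with one unsigned subtraction.
   Assumes CH holds a character value, so the subtraction cannot
   overflow.  Illustrative only, not part of the compiler proper.  */

static int
digit_range_sketch (int ch)
{
  return (unsigned int) (ch - '0') <= 9u;   /* One test, no branch chain.  */
}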
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
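
/* A self-contained sketch (not used by the compiler) of the shift trick
   above, on a 64-bit host integer with the mask step omitted: isolate
   the sign bit of the low P bits, move it to the top of the word, and
   arithmetic-shift it back down so it smears across bits P..63; XORing
   that with C clears those high bits exactly when C was already
   sign-extended.  Assumes 1 <= P <= 63 and the usual two's-complement
   arithmetic right shift, which the tree-level code obtains explicitly
   by converting to a signed type.  */

static long long
unextend_sketch (long long c, int p)
{
  long long temp = (c >> (p - 1)) & 1;   /* Sign bit of the P-bit field.  */
  temp = (long long) ((unsigned long long) temp << 63);  /* To the top bit.  */
  temp >>= 63 - p;                       /* Smear down through bit P.  */
  return c ^ temp;
}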
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (NE_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (EQ_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
        return build2 (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1 (BIT_NOT_EXPR,
                                                     lntype, ll_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1 (BIT_NOT_EXPR,
                                                     lntype, rl_mask),
                                        0)))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert (rntype, lhs);
                  ll_mask = fold_convert (rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert (lntype, rhs);
                  lr_mask = fold_convert (lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
                 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
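
/* A sketch of the identity behind the merged comparison built above:
   once both fields live in one word W, with disjoint masks M1 and M2 and
   the constants already shifted into place (C1 inside M1, C2 inside M2),

     (W & M1) == C1 && (W & M2) == C2

   has the same truth value as (W & (M1 | M2)) == (C1 | C2), which is the
   single comparison returned.  Plain host arithmetic for illustration;
   the compiler proper builds trees instead.  */

static int
masked_eq_pair_sketch (unsigned int w, unsigned int m1, unsigned int c1,
                       unsigned int m2, unsigned int c2)
{
  return (w & (m1 | m2)) == (c1 | c2);
}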
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        /* FIXME: We should be able to invert code without building a
           scratch tree node, but doing so would require us to
           duplicate a part of invert_truthvalue here.  */
        tree tem = invert_truthvalue (build2 (code, type, op0, op1));
        tem = optimize_minmax_comparison (TREE_CODE (tem),
                                          TREE_TYPE (tem),
                                          TREE_OPERAND (tem, 0),
                                          TREE_OPERAND (tem, 1));
        return invert_truthvalue (tem);
      }

    case GE_EXPR:
      return
        fold_build2 (TRUTH_ORIF_EXPR, type,
                     optimize_minmax_comparison
                     (EQ_EXPR, type, arg0, comp_const),
                     optimize_minmax_comparison
                     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
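
/* A sketch of one identity used above, checkable on host integers:
   MAX (x, c) == c holds exactly when x <= c, which is what the EQ_EXPR
   case returns when the two constants are equal.  Illustrative only.  */

static int
max_eq_const_sketch (int x, int c)
{
  int max = x > c ? x : c;
  return (max == c) == (x <= c);        /* Always 1, for any x and c.  */
}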
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and is unsigned, and its type is smaller than ctype,
             then we cannot pass through as widening.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (GET_MODE_SIZE (TYPE_MODE (ctype))
                   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (GET_MODE_SIZE (TYPE_MODE (type))
                  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*lang_hooks.types.signed_type) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && ! TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype, fold_convert (ctype, op0), t1),
                               c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && ! flag_wrapv))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && ! TREE_OVERFLOW (t1))
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ! flag_wrapv
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           op1, c, 0)));
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           c, op1, 0)));
        }
      break;

    default:
      break;
    }

  return 0;
}
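
/* A sketch of the example in the comment above, on host integers: when
   every multiplier is a multiple of the divisor, the division folds into
   the scale factors, so (x*8 + y*16) / 4 becomes x*2 + y*4 and no divide
   is left at run time.  Valid only under the no-overflow assumption the
   comment spells out.  Illustrative only.  */

static int
extract_muldiv_sketch (int x, int y)
{
  return x * 2 + y * 4;         /* Same value as (x * 8 + y * 16) / 4.  */
}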
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Return true if expr looks like an ARRAY_REF and set base and
   offset to the appropriate trees.  If there is no offset,
   offset is set to NULL_TREE.  Base will be canonicalized to
   something you can get the element type from using
   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
   in bytes to the base.  */

static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
  /* One canonical form is a PLUS_EXPR with the first
     argument being an ADDR_EXPR with a possible NOP_EXPR
     attached.  */
  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree inner_base, dummy1;
      /* Strip NOP_EXPRs here because the C frontends and/or
         folders present us (int *)&x.a + 4B possibly.  */
      STRIP_NOPS (op0);
      if (extract_array_ref (op0, &inner_base, &dummy1))
        {
          *base = inner_base;
          if (dummy1 == NULL_TREE)
            *offset = TREE_OPERAND (expr, 1);
          else
            *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
                                   dummy1, TREE_OPERAND (expr, 1));
          return true;
        }
    }
  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
     which we transform into an ADDR_EXPR with appropriate
     offset.  For other arguments to the ADDR_EXPR we assume
     zero offset and as such do not care about the ADDR_EXPR
     type and strip possible nops from it.  */
  else if (TREE_CODE (expr) == ADDR_EXPR)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      if (TREE_CODE (op0) == ARRAY_REF)
        {
          tree idx = TREE_OPERAND (op0, 1);
          *base = TREE_OPERAND (op0, 0);
          *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
                                 array_ref_element_size (op0));
        }
      else
        {
          /* Handle array-to-pointer decay as &a.  */
          if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
            *base = TREE_OPERAND (expr, 0);
          else
            *base = expr;
          *offset = NULL_TREE;
        }
      return true;
    }
  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
  else if (SSA_VAR_P (expr)
           && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
    {
      *base = expr;
      *offset = NULL_TREE;
      return true;
    }

  return false;
}
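
/* An illustration of the contract documented above, on host integers:
   for &a[i] the base is the array A and the byte offset is the index
   scaled by the element size, which is exactly what the MULT_EXPR built
   above computes.  Hypothetical standalone arithmetic, not compiler
   code.  */

static unsigned long
array_ref_byte_offset_sketch (unsigned long index, unsigned long elt_size)
{
  return index * elt_size;      /* Offset in bytes from the array base.  */
}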
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2 (code, type, true_value, arg);
      else
        lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2 (code, type, false_value, arg);
      else
        rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
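
/* A sketch of the distribution above on host integers: with A constant,
   a + (b ? x : y) computes the same value as b ? (a + x) : (a + y); the
   payoff is that one arm or the other often folds further.  Illustrative
   only.  */

static int
distribute_over_cond_sketch (int b, int a, int x, int y)
{
  return b ? (a + x) : (a + y); /* Same value as a + (b ? x : y).  */
}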
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
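
/* A sketch of the signed-zero hazard described above: under the default
   round-to-nearest mode, (-0.0) + 0.0 evaluates to +0.0, so "x + 0.0" is
   not the identity on -0.0, while "x - 0.0" preserves the sign of both
   zeros (and would stop doing so only when rounding towards -infinity).
   Illustrative only.  */

static double
add_positive_zero_sketch (double x)
{
  return x + 0.0;       /* Maps -0.0 to +0.0; identity for every other x.  */
}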
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2 (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2 (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2 (code, type, arg,
                              build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2 (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2 (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2 (code, type, arg,
                                build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2 (TRUTH_ANDIF_EXPR, type,
                                  fold_build2 (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0)),
                                  fold_build2 (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)));
            }
        }
    }

  return NULL_TREE;
}
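
/* A sketch of the squaring rewrite above: for a nonnegative constant C
   whose square C2 is finite and exact in the mode, sqrt(x) > C has the
   same truth value as x > C2, including for negative and NaN x (both
   sides are false).  C2 is a caller-supplied stand-in for the folded
   constant c*c.  Illustrative only.  */

static int
sqrt_gt_sketch (double x, double c2)
{
  return x > c2;        /* Plays the role of sqrt (x) > c, for c >= 0.  */
}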
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2 (EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
         not appropriate if we are in gimple form.  */
      if (in_gimple_form)
        return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
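
/* A sketch of the finite-max rewrite above: no double is both finite and
   greater than DBL_MAX, so "x < +Inf" and "x <= DBL_MAX" agree for every
   double, NaN included (both false).  The literal below is DBL_MAX.
   Illustrative only.  */

static int
lt_plus_inf_sketch (double x)
{
  return x <= 1.7976931348623157e+308;  /* Same truth value as x < +Inf.  */
}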
5977 /* Subroutine of fold() that optimizes comparisons of a division by
5978 a nonzero integer constant against an integer constant, i.e.
5981 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5982 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5983 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5985 The function returns the constant folded tree if a simplification
5986 can be made, and NULL_TREE otherwise. */
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
                         TREE_INT_CST_HIGH (arg01),
                         TREE_INT_CST_LOW (arg1),
                         TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
                             TREE_INT_CST_HIGH (prod),
                             TREE_INT_CST_LOW (tmp),
                             TREE_INT_CST_HIGH (tmp),
                             &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
                           TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
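
/* Worked example for fold_div_compare: with truncating division,
   "X / 4 == 3" holds exactly for X in [12, 15], so the comparison
   becomes the range check built by build_range_check above.  */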
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */
static tree
fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                              result_type, fold_convert (stype, arg00),
                              build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
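
/* Example: for an 8-bit operand A, "(A & 0x80) != 0" tests the sign
   bit and is folded into "(signed char) A < 0"; likewise
   "(A & 0x80) == 0" becomes "(signed char) A >= 0".  */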
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
                             inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
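
/* Example: "(A & 8) != 0" is rewritten as "((unsigned) A >> 3) & 1";
   for "(A & 8) == 0" the shifted value is first XORed with 1 so the
   final AND still produces the truth value directly.  */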
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */
bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
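
/* Example: fold_binary uses this predicate to canonicalize "5 + b"
   into "b + 5", so the patterns below only have to look for constants
   in the second operand.  */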
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
                        fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand (type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
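
/* Example: if C has type unsigned char widened to int, "(int) c == 300"
   is folded to constant false here, because 300 lies outside [0, 255],
   the range of the shorter type.  */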
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
            || TREE_CODE (arg1) == CONVERT_EXPR)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      tmp = build_int_cst_wide (inner_type,
                                TREE_INT_CST_LOW (arg1),
                                TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
                             TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
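
/* Example: for unsigned int U, "(int) U == 5" is folded back into
   "U == 5U"; only the signedness of the operands changes, never their
   precision, so the result is unaffected.  */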
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the multiplicative expression.
   If the function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! itype)
            continue;

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          break;
        }

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
                                       fold_convert (itype,
                                                     TREE_OPERAND (pos, 1)),
                                       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
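
/* Example: for "int a[N]" on a target with 4-byte int, the element
   size (step) is 4, so the address "&a[i] + 4*j" is rewritten into
   "&a[i + j]" by the function above.  */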
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
  if (!integer_onep (diff))
    return NULL_TREE;

  return fold_build2 (GE_EXPR, type, a, y);
}
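
/* Example: from the bound "A < X" we know A + 1 cannot overflow past
   MAX, so the sharp inequality "A + 1 > Y" may be weakened to the
   non-sharp "A >= Y" without changing its value.  */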
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      if (!FLOAT_TYPE_P (type))
        arg01 = build_int_cst (type, 1);
      else
        arg01 = build_real (type, dconst1);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      if (!FLOAT_TYPE_P (type))
        arg11 = build_int_cst (type, 1);
      else
        arg11 = build_real (type, dconst1);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = alt0;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
        {
          alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
                              build_int_cst (TREE_TYPE (arg00),
                                             int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
                        fold_build2 (code, type,
                                     fold_convert (type, alt0),
                                     fold_convert (type, alt1)),
                        fold_convert (type, same));

  return NULL_TREE;
}
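
/* Examples: "a*c + b*c" folds to "(a + b) * c" through the identical
   multiplicand cases, and "i*12 + j*4" folds to "(i*3 + j) * 4" through
   the common power-of-two factor case.  */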
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands; for a comparison, only conversions that do not
             change the signedness may be stripped.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any case, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1 (code, type, arg01);
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1 (code, type, arg02);
          tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                             arg01, arg02);

          /* If this was a conversion, and all we did was move it
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (COMPARISON_CLASS_P (arg0))
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold_build3 (COND_EXPR, type, arg0,
                                fold_build1 (code, type,
                                             integer_one_node),
                                fold_build1 (code, type,
                                             integer_zero_node));
        }
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
         new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
        return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
                            TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
          || TREE_CODE (op0) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type.  */
          if (! offset && bitpos == 0
              && TYPE_MAIN_VARIANT (TREE_TYPE (type))
                 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
            return fold_convert (type, build_fold_addr_expr (base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (type) != BOOLEAN_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
                                        TREE_INT_CST_HIGH (and1));
              tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
                                    TREE_CONSTANT_OVERFLOW (and1));
              return fold_build2 (BIT_AND_EXPR, type,
                                  fold_convert (type, and0), tem);
            }
        }

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
         T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
          && BINARY_CLASS_P (arg0)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree t0 = type;
          tree t1 = TREE_TYPE (arg00);
          tree tt0 = TREE_TYPE (t0);
          tree tt1 = TREE_TYPE (t1);
          tree s0 = TYPE_SIZE (tt0);
          tree s1 = TYPE_SIZE (tt1);

          if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
            return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
                           TREE_OPERAND (arg0, 1));
        }

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return NULL_TREE;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
        return fold_convert (type, negate_expr (arg0));
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert (type, fold_build1 (ABS_EXPR,
                                                    TREE_TYPE (targ0),
                                                    targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
        }
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return build2 (COMPLEX_EXPR, type,
                       TREE_OPERAND (arg0, 0),
                       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return build_complex (type, TREE_REALPART (arg0),
                              negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (CONJ_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
        return TREE_OPERAND (arg0, 0);
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 0)))))
        return fold_build2 (BIT_XOR_EXPR, type, tem,
                            fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary (BIT_NOT_EXPR, type,
                                     fold_convert (type,
                                                   TREE_OPERAND (arg0, 1)))))
        return fold_build2 (BIT_XOR_EXPR, type,
                            fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return NULL_TREE;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (REALPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold_build2 (TREE_CODE (arg0), type,
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 0)),
                            fold_build1 (IMAGPART_EXPR, type,
                                         TREE_OPERAND (arg0, 1)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
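
/* Examples of fold_unary at work: "~(-A)" becomes "A - 1", "~(A - 1)"
   becomes "-A", and "fabs ((double) f)" for float F becomes
   "(double) fabsf (f)".  */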
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2);

  arg0 = op0;
  arg1 = op1;

  if (arg0)
    {
      tree subop;

      /* Strip any conversions that don't change the mode.  This is
         safe for every expression, except for a comparison expression
         because its signedness is derived from its operands.  So, in
         the latter case, only strip conversions that don't change the
         signedness.

         Note that this is done as an internal manipulation within the
         constant folder, in order to find the simplest representation
         of the arguments so that their form can be studied.  In any
         case, the appropriate type conversions should be put back in
         the tree that will get out of the constant folder.  */
      if (kind == tcc_comparison)
        STRIP_SIGN_NOPS (arg0);
      else
        STRIP_NOPS (arg0);

      if (TREE_CODE (arg0) == COMPLEX_CST)
        subop = TREE_REALPART (arg0);
      else
        subop = arg0;

      if (TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }

  if (arg1)
    {
      tree subop;

      /* Likewise strip conversions from the second operand; see the
         comment for the first operand above.  */
      if (kind == tcc_comparison)
        STRIP_SIGN_NOPS (arg1);
      else
        STRIP_NOPS (arg1);

      if (TREE_CODE (arg1) == COMPLEX_CST)
        subop = TREE_REALPART (arg1);
      else
        subop = arg1;

      if (TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (code, type, op1, op0);

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
                         fold_convert (boolean_type_node, arg0),
                         fold_convert (boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return fold_convert (type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build2 (code, type,
                                    TREE_OPERAND (arg0, 1), op1));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold_build2 (code, type,
                                    op0, TREE_OPERAND (arg1, 1)));

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2 (MINUS_EXPR, type,
                            fold_convert (type, arg1),
                            fold_convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert ~A + 1 to -A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1), 0)))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2 (pcode, type,
                                    fold_build2 (PLUS_EXPR, type,
                                                 fold_convert (type, parg0),
                                                 fold_convert (type, marg)),
                                    fold_convert (type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return fold_build2 (PLUS_EXPR, type,
                                    fold_convert (type, parg0),
                                    fold_build2 (pcode, type,
                                                 fold_convert (type, marg),
                                                 fold_convert (type,
                                                               parg1)));
            }

          /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the
             step of the array.  The loop optimizer sometimes produces this
             kind of expression.  */
          if (TREE_CODE (arg0) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
              if (tem)
                return fold_convert (type, tem);
            }
          else if (TREE_CODE (arg1) == ADDR_EXPR)
            {
              tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
              if (tem)
                return fold_convert (type, tem);
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2 (MINUS_EXPR, type,
                                    fold_convert (type, arg0),
                                    fold_convert (type, tem));
            }

          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2 (MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                             code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build2 ((code0 == LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build2 ((code0 != LSHIFT_EXPR
                                  ? LROTATE_EXPR
                                  : RROTATE_EXPR),
                                 type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
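
      /* Example: for unsigned 32-bit X, "(X << 3) + (X >> 29)" matches
         the patterns above and is rewritten as a left rotation of X by
         3 bits.  */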
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -funsafe-math-optimizations.  */

      if (! wins
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }

      t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != type)
            t1 = fold_convert (type, t1);

          return t1;
        }
      return NULL_TREE;
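
      /* Example of the reassociation above: "(x + 1) + 2" splits into
         the variable x and the literals 1 and 2, which associate_trees
         then recombines into "x + 3".  */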
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1))
        return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1 (BIT_NOT_EXPR, type, arg1);

      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 0)),
                                    arg0);
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold_build2 (BIT_AND_EXPR, type,
                                    fold_build1 (BIT_NOT_EXPR, type,
                                                 TREE_OPERAND (arg1, 1)),
                                    arg0);
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2 (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2 (MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
        return fold_build2 (PLUS_EXPR, type,
                            fold_convert (type, arg0),
                            fold_convert (type, negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2 (MULT_EXPR, type, diff,
                                  fold_convert (type, esz));
            }
        }

      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the
         step of the array.  The loop optimizer sometimes produces this
         kind of expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
          if (tem)
            return fold_convert (type, tem);
        }

      if (flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
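
      /* Example: "-1 - A" is folded to "~A" above, since in two's
         complement ~A == -A - 1; by the same identity "-A - 1" also
         becomes "~A".  */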
7827 /* (-A) * (-B) -> A * B */
7828 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
7829 return fold_build2 (MULT_EXPR
, type
,
7830 TREE_OPERAND (arg0
, 0),
7831 negate_expr (arg1
));
7832 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
7833 return fold_build2 (MULT_EXPR
, type
,
7835 TREE_OPERAND (arg1
, 0));
7837 if (! FLOAT_TYPE_P (type
))
7839 if (integer_zerop (arg1
))
7840 return omit_one_operand (type
, arg1
, arg0
);
7841 if (integer_onep (arg1
))
7842 return non_lvalue (fold_convert (type
, arg0
));
7843 /* Transform x * -1 into -x. */
7844 if (integer_all_onesp (arg1
))
7845 return fold_convert (type
, negate_expr (arg0
));
7847 /* (a * (1 << b)) is (a << b) */
7848 if (TREE_CODE (arg1
) == LSHIFT_EXPR
7849 && integer_onep (TREE_OPERAND (arg1
, 0)))
7850 return fold_build2 (LSHIFT_EXPR
, type
, arg0
,
7851 TREE_OPERAND (arg1
, 1));
7852 if (TREE_CODE (arg0
) == LSHIFT_EXPR
7853 && integer_onep (TREE_OPERAND (arg0
, 0)))
7854 return fold_build2 (LSHIFT_EXPR
, type
, arg1
,
7855 TREE_OPERAND (arg0
, 1));
7857 if (TREE_CODE (arg1
) == INTEGER_CST
7858 && 0 != (tem
= extract_muldiv (op0
,
7859 fold_convert (type
, arg1
),
7861 return fold_convert (type
, tem
);
7866 /* Maybe fold x * 0 to 0. The expressions aren't the same
7867 when x is NaN, since x * 0 is also NaN. Nor are they the
7868 same in modes with signed zeros, since multiplying a
7869 negative value by 0 gives -0, not +0. */
7870 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
7871 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
7872 && real_zerop (arg1
))
7873 return omit_one_operand (type
, arg1
, arg0
);
7874 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7875 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7876 && real_onep (arg1
))
7877 return non_lvalue (fold_convert (type
, arg0
));
7879 /* Transform x * -1.0 into -x. */
7880 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
7881 && real_minus_onep (arg1
))
7882 return fold_convert (type
, negate_expr (arg0
));
7884 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7885 if (flag_unsafe_math_optimizations
7886 && TREE_CODE (arg0
) == RDIV_EXPR
7887 && TREE_CODE (arg1
) == REAL_CST
7888 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
7890 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
7893 return fold_build2 (RDIV_EXPR
, type
, tem
,
7894 TREE_OPERAND (arg0
, 1));
7897 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7898 if (operand_equal_p (arg0
, arg1
, 0))
7900 tree tem
= fold_strip_sign_ops (arg0
);
7901 if (tem
!= NULL_TREE
)
7903 tem
= fold_convert (type
, tem
);
7904 return fold_build2 (MULT_EXPR
, type
, tem
, tem
);
7908 if (flag_unsafe_math_optimizations
)
7910 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
7911 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
7913 /* Optimizations of root(...)*root(...). */
7914 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
7916 tree rootfn
, arg
, arglist
;
7917 tree arg00
= TREE_VALUE (TREE_OPERAND (arg0
, 1));
7918 tree arg10
= TREE_VALUE (TREE_OPERAND (arg1
, 1));
7920 /* Optimize sqrt(x)*sqrt(x) as x. */
7921 if (BUILTIN_SQRT_P (fcode0
)
7922 && operand_equal_p (arg00
, arg10
, 0)
7923 && ! HONOR_SNANS (TYPE_MODE (type
)))
7926 /* Optimize root(x)*root(y) as root(x*y). */
7927 rootfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7928 arg
= fold_build2 (MULT_EXPR
, type
, arg00
, arg10
);
7929 arglist
= build_tree_list (NULL_TREE
, arg
);
7930 return build_function_call_expr (rootfn
, arglist
);
7933 /* Optimize expN(x)*expN(y) as expN(x+y). */
7934 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
7936 tree expfn
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
7937 tree arg
= fold_build2 (PLUS_EXPR
, type
,
7938 TREE_VALUE (TREE_OPERAND (arg0
, 1)),
7939 TREE_VALUE (TREE_OPERAND (arg1
, 1)));
7940 tree arglist
= build_tree_list (NULL_TREE
, arg
);
7941 return build_function_call_expr (expfn
, arglist
);
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, arg,
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
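	      /* For example, pow (x, 2.0) * pow (y, 2.0) folds to
		 pow (x * y, 2.0), and pow (x, 2.0) * pow (x, 3.0) folds
		 to pow (x, 5.0); both rewrites are only valid under
		 -funsafe-math-optimizations.  */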
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
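	      /* For example, pow (x, 2.0) * x becomes pow (x, 3.0): the
		 REAL_CST exponent is incremented with real_arithmetic
		 and a new call to the same pow declaration is built.  */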
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}
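      /* In source terms the fold above turns "~a | ~b" into "~(a & b)"
	 (De Morgan), trading two NOTs and an IOR for one AND and one
	 NOT.  */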
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1 (BIT_NOT_EXPR, type, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_cst (type, -1);
	  t1 = force_fit_type (t1, 0, false, false);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
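      /* For example, (a & 0x0f) ^ (b & 0xf0) has disjoint masks, so no
	 bit position can be set in both operands and the XOR behaves
	 exactly like an IOR; retrying as BIT_IOR_EXPR exposes the
	 IOR-specific folds above.  */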
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
			    fold_convert (type, t1));
	  return t1;
	}

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2 (code, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}
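      /* Here the mask is redundant because the widened value cannot
	 have bits above the precision of the narrower unsigned type:
	 with "unsigned char c", the expression "(int) c & 0377" already
	 equals "(int) c", so the BIT_AND_EXPR is dropped.  */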
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1 (BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0)));
	}

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  */
      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands (type, r, arg0, arg1);
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2 (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2 (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold_build2 (MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2 (MULT_EXPR, type,
				      fold_convert (type, arg0), tem);
		}
	    }
	}
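      /* For example, x / 4.0 becomes x * 0.25 even without
	 -funsafe-math-optimizations, because 0.25 is an exact binary
	 reciprocal of 4.0; x / 3.0 is left alone since 1.0/3.0 is
	 inexact.  */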
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2 (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2 (MULT_EXPR, type,
			    fold_build2 (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold_build2 (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold_build2 (RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_function_call_expr (cosfn,
						     TREE_OPERAND (arg0, 1));
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = TREE_OPERAND (arg0, 1);
		      tmp = build_function_call_expr (cosfn, tmp);
		      return fold_build2 (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (!TYPE_UNSIGNED (type) && !flag_wrapv
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	return fold_build2 (code, type, negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & C2", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
	{
	  unsigned HOST_WIDE_INT high, low;
	  tree mask;
	  int l;

	  l = tree_log2 (arg1);
	  if (l >= HOST_BITS_PER_WIDE_INT)
	    {
	      high = ((unsigned HOST_WIDE_INT) 1
		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
	      low = -1;
	    }
	  else
	    {
	      high = 0;
	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
	    }

	  mask = build_int_cst_wide (type, low, high);
	  return fold_build2 (BIT_AND_EXPR, type,
			      fold_convert (type, arg0), mask);
	}
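      /* For example, for unsigned x, "x % 8" becomes "x & 7": l is
	 tree_log2 (8) == 3, so the mask built here is (1 << 3) - 1.
	 The sign restriction matters because C's TRUNC_MOD_EXPR rounds
	 toward zero, and e.g. -9 % 8 is -1 while -9 & 7 is 7.  */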
      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_CONSTANT_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
	return fold_build2 (code, type, fold_convert (type, arg0),
			    fold_convert (type, TREE_OPERAND (arg1, 0)));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
	return fold_convert (type, tem);

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return build_int_cst (type, 0);
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			      build_int_cst (type, low));
	}
      /* Transform (x >> c) << c into x & (-1<<c), or transform
	 (x << c) >> c into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);

	  if (low0 == low1)
	    {
	      tree lshift;
	      tree arg00;

	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
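      /* For a 32-bit unsigned x, "(x << 8) >> 8" becomes
	 "x & 0x00ffffff": int_const_binop applies the same shift to the
	 all-ones constant, producing exactly the mask of bits that
	 survive the round trip.  Likewise "(x >> 8) << 8" becomes
	 "x & 0xffffff00".  */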
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (NULL_TREE,
				    GET_MODE_BITSIZE (TYPE_MODE (type)));
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
	}
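      /* E.g. a 32-bit rotate left by 8 becomes a rotate right by 24;
	 canonicalizing on RROTATE_EXPR means the rotate folds below
	 only have to match one direction.  */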
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2 (TREE_CODE (arg0), type,
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2 (code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      return NULL_TREE;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      return NULL_TREE;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2 (code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2 (code, type, arg0, tem);
	}
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a00,
				fold_build2 (code, type, a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2 (TREE_CODE (arg0), type, a01,
				fold_build2 (code, type, a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2 (TREE_CODE (arg0), type,
				fold_build2 (code, type, a00, a10),
				a01);
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (code, type, op0, op1)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue (arg0);
	  return non_lvalue (fold_convert (type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold_build2 (swap_tree_comparison (code), type, op1, op0);

      /* ~a != C becomes a != ~C where C is a constant.  Likewise for ==.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (code == NE_EXPR || code == EQ_EXPR))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
			    fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
					 arg1));

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue (fold_convert (type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, arg0);

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is a comparison of two exprs that look like an
	 ARRAY_REF of the same object, then we can fold this to a
	 comparison of the two offsets.  */
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	{
	  tree base0, offset0, base1, offset1;

	  if (extract_array_ref (arg0, &base0, &offset0)
	      && extract_array_ref (arg1, &base1, &offset1)
	      && operand_equal_p (base0, base1, 0))
	    {
	      /* Handle no offsets on both sides specially.  */
	      if (offset0 == NULL_TREE
		  && offset1 == NULL_TREE)
		return fold_build2 (code, type, integer_zero_node,
				    integer_zero_node);

	      if (!offset0 || !offset1
		  || TREE_TYPE (offset0) == TREE_TYPE (offset1))
		{
		  if (offset0 == NULL_TREE)
		    offset0 = build_int_cst (TREE_TYPE (offset1), 0);
		  if (offset1 == NULL_TREE)
		    offset1 = build_int_cst (TREE_TYPE (offset0), 0);
		  return fold_build2 (code, type, offset0, offset1);
		}
	    }
	}
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((code != EQ_EXPR && code != NE_EXPR)
	  && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
		  && !(flag_wrapv || flag_trapv))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (0, type);

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    return constant_boolean_node (1, type);

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (1, type);

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		return constant_boolean_node (0, type);
	    }
	}
      /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && !TYPE_UNSIGNED (TREE_TYPE (arg1))
	      && !(flag_wrapv || flag_trapv))
	  && (TREE_CODE (arg1) == INTEGER_CST
	      && !TREE_OVERFLOW (arg1)))
	{
	  tree const1 = TREE_OPERAND (arg0, 1);
	  tree const2 = arg1;
	  tree variable = TREE_OPERAND (arg0, 0);
	  tree lhs;
	  int lhs_add;
	  lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

	  lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);
	  if (TREE_CODE (lhs) == TREE_CODE (arg1)
	      && (TREE_CODE (lhs) != INTEGER_CST
		  || !TREE_OVERFLOW (lhs)))
	    return fold_build2 (code, type, variable, lhs);
	}
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold_build2 (code, type, fold_convert (newtype, targ0),
				fold_convert (newtype, targ1));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold_build2 (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst)));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold_build2 (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, tem, arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1));
	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1));
	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }

	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
	      && host_integerp (DECL_SIZE (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)), 1))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
	      tree folded_compare, shift;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold_build2 (code, type,
					    TREE_OPERAND (varop, 0), arg1);
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      shift = build_int_cst (NULL_TREE,
				     TYPE_PRECISION (TREE_TYPE (varop)) - size);
	      shift = fold_convert (TREE_TYPE (varop), shift);
	      newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift);
	      newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift);
	    }

	  return fold_build2 (code, type, varop, newconst);
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (GT_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1,
				  build_int_cst (TREE_TYPE (arg1), 1), 0);
	      return fold_build2 (LE_EXPR, type, arg0,
				  fold_convert (TREE_TYPE (arg0), arg1));

	    default:
	      break;
	    }
	}
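      /* For instance, "x >= 1" becomes "x > 0" and "x < 1" becomes
	 "x <= 0"; canonicalizing on the decremented bound lets the
	 constant-bound checks below fire more often.  */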
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2 (NE_EXPR, type, arg0, arg1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type, arg0, arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2 (NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (NE_EXPR, type, arg0, arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold_build2 (EQ_EXPR, type, arg0, arg1);
		default:
		  break;
		}

	    else if (!in_gimple_form
		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
		    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert (st0, arg0),
					build_int_cst (st1, 0));
		  }
	      }
	  }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold_build2 (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && (TREE_CODE (arg0) == NOP_EXPR
		   || TREE_CODE (arg0) == CONVERT_EXPR))
	{
	  /* If we are widening one operand of an integer comparison,
	     see if the other operand is similarly being widened.  Perhaps we
	     can do the comparison in the narrower type.  */
	  tem = fold_widened_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;

	  /* Or if we are changing signedness.  */
	  tem = fold_sign_changed_comparison (code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tem = optimize_minmax_comparison (code, type, op0, op1);
	  if (tem)
	    return tem;
	}

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      else if (code == GE_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1)
		   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		       && real_zerop (arg1))))
	return omit_one_operand (type, integer_one_node, arg0);

      /* Convert ABS_EXPR<x> < 0 to false.  */
      else if (code == LT_EXPR
	       && tree_expr_nonnegative_p (arg0)
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == ABS_EXPR
	       && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					   arg01, TREE_OPERAND (arg00, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold_build2 (code, type,
			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					   arg00, TREE_OPERAND (arg01, 1)),
				   fold_convert (TREE_TYPE (arg0),
						 integer_one_node)),
			   arg1);
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 0)),
				     fold_convert (newtype,
						   TREE_OPERAND (arg0, 1)));

	  return fold_build2 (code, type, newmod,
			      fold_convert (newtype, arg1));
	}

      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert (TREE_TYPE (arg0),
						integer_zero_node));
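      /* For illustration, with the hypothetical constant C == 8:
	 (a & 8) == 8 holds exactly when bit 3 of A is set, which is the
	 same condition as (a & 8) != 0; C being a power of two is what
	 makes this single-bit reading valid.  */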
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
      if (tem)
	return tem;
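      /* For illustration, on a hypothetical 32-bit signed A:
	 (a & 0x80000000) != 0 tests exactly the sign bit, so it folds
	 to a < 0, and the == 0 form folds to a >= 0.  */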
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1 (BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}
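      /* For illustration, with hypothetical constants C == 3, D == 4:
	 D & ~C == 4 != 0, i.e. D has a bit that the AND with C always
	 clears, so (a & 3) == 4 folds to 0 and (a & 3) != 4 to 1.  */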
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				       TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
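      /* For illustration, with hypothetical constants C == 4, D == 3:
	 C & ~D == 4 != 0, i.e. the OR always sets a bit that D lacks,
	 so (a | 4) == 3 folds to 0 and the NE form to 1.  */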
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       build_int_cst (TREE_TYPE (arg0), 0));

      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TYPE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  build_int_cst (TREE_TYPE (arg0), 0));
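      /* For illustration, with a hypothetical shift count Y == 4 and
	 unsigned X: X < (1 << 4) says X fits in the low four bits,
	 i.e. (X >> 4) == 0, and X >= (1 << 4) becomes (X >> 4) != 0.  */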
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold_build2 (EQ_EXPR, type, arg0, arg1);

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      gcc_unreachable ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	      tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	      tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  if (save_p)
		    return save_expr (build2 (code, type, cval1, cval2));
		  return fold_build2 (code, type, cval1, cval2);
		}
	    }
	}
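      /* For illustration, take the hypothetical ((x > y) - (y > x)) > 0:
	 substituting the three orderings gives high_result = 1,
	 equal_result = 0 and low_result = 0, i.e. mask 4, so the whole
	 expression folds to x > y.  */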
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Fold a comparison of the address of COMPONENT_REFs with the same
	 type and component to a comparison of the address of the base
	 object.  In short, &x->a OP &y->a to x OP y and
	 &x->a OP &y.a to x OP &y  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
	{
	  tree cref0 = TREE_OPERAND (arg0, 0);
	  tree cref1 = TREE_OPERAND (arg1, 0);
	  if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	    {
	      tree op0 = TREE_OPERAND (cref0, 0);
	      tree op1 = TREE_OPERAND (cref1, 0);
	      return fold_build2 (code, type,
				  build_fold_addr_expr (op0),
				  build_fold_addr_expr (op1));
	    }
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    {
	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
	      return fold_build2 (code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
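      /* For illustration: with char *p, strlen (p) == 0 becomes
	 *p == 0, since a string has length zero exactly when its first
	 character is the terminating NUL; the library call disappears.  */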
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  t1 = fold_div_compare (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
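      /* For illustration, with hypothetical constants and unsigned x:
	 x / 3 == 2 holds exactly for x in [6, 8], so fold_div_compare
	 can replace the division by the range test 6 <= x && x <= 8.  */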
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand (type, res, arg0);
	}
      t1 = fold_relational_const (code, type, arg0, arg1);
      return t1 == NULL_TREE ? NULL_TREE : t1;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand (type, t1, arg0);
	}
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2 (code, type, fold_convert (newtype, targ0),
			      fold_convert (newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.
   Returns tree TP if it is LABEL_EXPR.  Otherwise it returns NULL_TREE.
   Do not check the sub-tree of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp,
		  int *walk_subtrees,
		  void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;
    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* no fall through  */
    default:
      return NULL_TREE;
    }
}

/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns NULL_TREE if no
   addressable label is found.  */

static bool
contains_label_p (tree st)
{
  return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build3 (code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	return fold_convert (type, fold_build2 (BIT_AND_EXPR,
						TREE_TYPE (tem), tem, arg1));
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
	}
      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
	}
      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;
    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */
tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_function_decl)];
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with an
   operand OP0.  */

tree
fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary (code, type, op0);
  if (!tem)
    tem = build1_stat (code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  Return a folded expression if successful.
   Otherwise, return a tree expression with code CODE of type TYPE
   with operands OP0 and OP1.  */

tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
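/* For illustration, with hypothetical operands: multiple_of_p returns 1
   for TOP = J * 8 and BOTTOM = 4, because for MULT_EXPR it suffices that
   one factor is a multiple (8 == 2 * 4).  For TOP = J * 6 and BOTTOM = 4
   it returns 0 even though J might happen to be even at run time; the
   answer is deliberately conservative.  */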
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
	return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	      /* Always true.  */
	      return 1;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_POW):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
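/* Sketch of the zero-extension argument used above, with hypothetical
   widths: two unsigned 8-bit values extended to a 32-bit signed sum give
   at most 255 + 255 == 510 < 2^31, so the PLUS_EXPR cannot wrap negative;
   prec == MAX (8, 8) + 1 == 9 < 32 is the "at least 2 bits shorter" test
   in code form.  */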
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL_TREE;
}
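/* For illustration: reading "abc"[1] comes in as an ARRAY_REF, the index
   1 is checked against TREE_STRING_LENGTH, and the fold returns the
   character constant 'b' without materializing the string at run time.  */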
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = build_int_cst_wide (type, low, high);
	t = force_fit_type (t, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type),
			    TREE_CONSTANT_OVERFLOW (arg0));
	break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = build_int_cst_wide (type, low, high);
	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
			      TREE_CONSTANT_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
			  ~ TREE_INT_CST_LOW (arg0),
			  ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
		      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* Build an expression for a cleanup point containing EXPR, of type TYPE.
   Don't build a cleanup point expression if EXPR doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
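/* A standalone sketch (not part of the original file) of the source-level
   effect of fold_ignored_result's COMPOUND_EXPR case: when the value of a
   comma expression is ignored, only the side-effecting half needs to be
   kept.  Guarded out of this translation unit; it compiles and runs on
   its own.  */
#if 0
#include <stdio.h>

static int calls;

static int
f (void)
{
  return ++calls;
}

int
main (void)
{
  int x = 5;

  /* With the result ignored, (f (), x + 1) reduces to just f (),
     because x + 1 neither traps nor has side effects.  */
  (void) (f (), x + 1);
  printf ("f called %d time(s)\n", calls);  /* prints 1 */
  return 0;
}
#endif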
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
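/* A standalone sketch (not part of the original file) of the two rounding
   strategies round_up chooses between, expressed in host arithmetic rather
   than trees.  Guarded out of this translation unit; it compiles and runs
   on its own.  */
#if 0
#include <assert.h>

static long
round_up_host (long value, long divisor)
{
  /* Power of two: add divisor - 1, then clear the low bits.  */
  if (divisor == (divisor & -divisor))
    return (value + divisor - 1) & -divisor;
  /* General case: ceiling division followed by multiplication.  */
  return (value + divisor - 1) / divisor * divisor;
}

int
main (void)
{
  assert (round_up_host (13, 8) == 16);
  assert (round_up_host (16, 8) == 16);   /* already a multiple    */
  assert (round_up_host (13, 12) == 24);  /* non-power-of-two path */
  return 0;
}
#endif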
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
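/* Companion sketch for round_down (likewise not part of the original
   file): for a power-of-two divisor, value & -divisor clears the low
   bits and floors to a multiple.  Guarded out of this translation unit;
   it compiles and runs on its own.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert ((29L & -8L) == 24);     /* round_down (29, 8)                 */
  assert ((24L & -8L) == 24);     /* already a multiple                 */
  assert (29L / 12 * 12 == 24);   /* non-power-of-two: floor-div, mult  */
  return 0;
}
#endif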
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
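/* A standalone sketch (not part of the original file) of the decomposition
   split_address_to_core_and_offset performs: the address of a component
   splits into a core address plus a constant byte offset.  Guarded out of
   this translation unit; it compiles and runs on its own.  */
#if 0
#include <assert.h>
#include <stddef.h>

struct pair { int a; int b; };

int
main (void)
{
  struct pair s;
  char *core = (char *) &s;                /* the core: &s        */
  size_t off = offsetof (struct pair, b);  /* the constant offset */

  assert ((char *) &s.b == core + off);
  return 0;
}
#endif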
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
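/* A standalone sketch (not part of the original file) of the property
   ptr_difference_const detects: addresses sharing the same core differ
   by a compile-time constant.  Guarded out of this translation unit;
   it compiles and runs on its own.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];

  /* &a[7] and &a[2] share the core &a, so their difference folds to
     the constant 5 (elements), i.e. 5 * sizeof (int) bytes.  */
  assert (&a[7] - &a[2] == 5);
  return 0;
}
#endif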
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));