/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
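
/* Illustrative note (not part of the original sources): the low three
   bits encode LT, EQ and GT as 1, 2 and 4, with bit 8 for UNORD, so
   the codes compose by bitwise OR, e.g. COMPCODE_LE is
   COMPCODE_LT | COMPCODE_EQ and COMPCODE_NE is LT | GT | UNORD.
   This is what lets combine_comparisons below merge two comparisons
   of the same operands with a single AND or OR over their codes.  */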
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
                    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
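
/* Worked example (illustrative): with 8-bit values for brevity,
   a = 0x70, b = 0x70, sum = 0xE0: both addends are positive but the
   sum is negative, so ~(a ^ b) and (a ^ sum) both have the sign bit
   set and the macro is nonzero -- overflow.  For a = 0x70, b = 0x90
   the operands differ in sign, ~(a ^ b) has the sign bit clear, and
   the macro is zero: such an addition can never overflow.  */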
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
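
/* Worked example (illustrative): on a host with 32-bit HOST_WIDE_INT,
   BASE is 0x10000 and the pair (low = 0x12345678, hi = 0x9ABCDEF0) is
   stored as the four half-words
     { 0x5678, 0x1234, 0xDEF0, 0x9ABC },
   each of which fits in 16 bits, so half-word products never lose
   carries in the 32-bit arithmetic below.  */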
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
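
/* Usage sketch (illustrative): encode and decode are exact inverses,
   so
     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo2;
     HOST_WIDE_INT hi2;
     encode (w, lo, hi);
     decode (w, &lo2, &hi2);
   leaves lo2 == lo and hi2 == hi for any doubleword value.  */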
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
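
/* Example behavior (illustrative): for T of an 8-bit signed type
   holding 256 (0x100), the bits beyond the 8-bit precision are
   cleared, leaving 0, and the function returns nonzero because the
   stored value changed.  An unsigned non-sizetype T is truncated the
   same way, but only the incoming OVERFLOW is propagated.  */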
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
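
/* Example (illustrative): the carry out of the low word is detected
   by the unsigned comparison l < l1.  With 32-bit words,
   add_double (0xFFFFFFFF, 0, 1, 0, &lv, &hv) wraps lv to 0, the
   comparison fires, and hv becomes 1; the result is 2^32 with no
   signed overflow reported.  */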
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
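
/* Example (illustrative): negation overflows only for the most
   negative doubleword value, where *hv and h1 both keep the sign bit
   set.  With 32-bit words, negating (l1 = 0, h1 = 0x80000000) returns
   the same bit pattern and reports overflow; every other value
   negates cleanly.  */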
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
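
/* Note (illustrative): prod[4..7] hold the high doubleword of the
   full 4-word by 4-word product.  A signed result fits in one
   doubleword only if that high part is all zero bits (nonnegative
   result) or all one bits (negative result), matching the sign of
   *hv; that is exactly what the final return expression tests.  */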
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
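
/* Example (illustrative): with 32-bit words and prec == 64,
   lshift_double (1, 0, 40, 64, &lv, &hv, 0) moves the single set bit
   past the low word: count >= HOST_BITS_PER_WIDE_INT, so
   hv == 1 << (40 - 32) == 0x100 and lv == 0, i.e. the value 2^40.  */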
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den).  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)).  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den).  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
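
/* Worked example of the rounding modes (illustrative), for
   num = -7, den = 2, uns = 0:
     TRUNC_DIV_EXPR   quo = -3, rem = -1  (round toward zero)
     FLOOR_DIV_EXPR   quo = -4, rem =  1  (round toward -infinity)
     CEIL_DIV_EXPR    quo = -3, rem = -1  (round toward +infinity)
     ROUND_DIV_EXPR   quo = -4, rem =  1  (round to nearest; the tie
                                           at -3.5 rounds away from 0)
   In every mode the results satisfy num == quo * den + rem.  */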
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN: case BUILT_IN_ASINF: case BUILT_IN_ASINL:
    case BUILT_IN_ATAN: case BUILT_IN_ATANF: case BUILT_IN_ATANL:
    case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
    case BUILT_IN_TAN: case BUILT_IN_TANF: case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A.  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
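
/* Example decomposition (illustrative): for IN = x + 3 with
   CODE == PLUS_EXPR, the literal 3 is returned in *LITP and x is the
   returned variable part; for IN = x - 4 the literal 4 comes back in
   *MINUS_LITP instead, because it was subtracted.  */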
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type (low, type);

  t = build_int_2 (low, hi);
  TREE_TYPE (t) = TREE_TYPE (arg1);

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
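
/* Usage sketch (illustrative): folding 2 + 3 at compile time amounts
   to
     tree sum = int_const_binop (PLUS_EXPR, arg1, arg2, 0);
   with arg1 and arg2 INTEGER_CSTs of the same type; the result is an
   INTEGER_CST (for small sizetype values, a cached one) whose
   overflow bits are set as described above.  */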
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = t;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
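
/* Example (illustrative): with constant sizetype operands arg0 == 4
   and arg1 == 12, arg0 < arg1, so the difference 12 - 4 is formed in
   the unsigned type, converted to the signed ssizetype, and
   subtracted from zero, yielding -8 without ever creating an
   out-of-range unsigned intermediate.  */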
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TYPE_UNSIGNED (type)
                                     < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            case FIX_ROUND_EXPR:
              real_round (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
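
/* Example of the saturating behavior (illustrative): converting the
   REAL_CST 1.0e10 to a 32-bit signed integer type exceeds
   TYPE_MAX_VALUE, so the result is the INT_MAX constant with the
   overflow flag set; a NaN converts to zero, also flagged, per the
   Java-inspired rules described above.  */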
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build2 (COMPLEX_EXPR, type,
                       fold_convert (TREE_TYPE (type), arg),
                       fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL: case PARM_DECL: case RESULT_DECL:
    case LABEL_DECL: case FUNCTION_DECL: case SSA_NAME:

    case COMPONENT_REF: case INDIRECT_REF: case ARRAY_REF:
    case ARRAY_RANGE_REF: case BIT_FIELD_REF: case OBJ_TYPE_REF:

    case REALPART_EXPR: case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR: case MODIFY_EXPR: case TARGET_EXPR:
    case COND_EXPR: case BIND_EXPR: case MIN_EXPR: case MAX_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int) TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return x;
    }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      abort ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      abort ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      abort ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
                         truth_type, ll_arg, lr_arg));
}
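/* A worked illustration of the encoding (assuming the usual bit
   assignment LT = 1, EQ = 2, GT = 4 and UNORD = 8 in the enum
   comparison_code above): for integer operands, combining
   (x < y) || (x == y) gives COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
   so the pair folds to x <= y, while (x < y) && (x > y) gives
   COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE, a constant false.  */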
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If one is specified and the other isn't, they aren't equal and if
     neither is specified, they are.

     ??? This is temporary and is meant only to handle the cases of the
     optional operands for COMPONENT_REF and ARRAY_REF.  */
  if ((arg0 && !arg1) || (!arg0 && arg1))
    return 0;
  else if (!arg0 && !arg1)
    return 1;
  /* If either is ERROR_MARK, they aren't equal.  */
  else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);

      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), flags);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0),
                           TREE_OPERAND (arg1, 0), flags)
          && operand_equal_p (TREE_OPERAND (arg0, 1),
                              TREE_OPERAND (arg1, 1), flags))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), flags);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 3),
                                      TREE_OPERAND (arg1, 3), flags));

        case COMPONENT_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), flags));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), flags));

        default:
          return 0;
        }

    case 'e':
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), flags);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), flags)
                 && operand_equal_p (TREE_OPERAND (arg0, 1),
                                     TREE_OPERAND (arg1, 1), flags);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), flags))
                 || (operand_equal_p (TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 1), flags)
                     && operand_equal_p (TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 0), flags));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg1, 0), flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }
}
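/* For example, with FLAGS == 0, a + b and b + a compare equal through
   the commutative-operand retry above, provided neither operand has
   side effects, while f () + b never matches because the call makes
   TREE_SIDE_EFFECTS true.  With OEP_ONLY_CONST set, only the constant
   cases at the top can return nonzero.  */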
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';
  else if (class == 'e' && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */
      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
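/* For instance, (x < y) || (x == y) qualifies: the walk records
   *CVAL1 = x and *CVAL2 = y and returns 1, whereas (x < y) || (z == y)
   fails because z would be a third value.  */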
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build2 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1),
                           eval_subst (TREE_OPERAND (arg, 1),
                                       old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build3 (code, type,
                               eval_subst (TREE_OPERAND (arg, 0),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 1),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 2),
                                           old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */
        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build2 (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
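/* As an illustration, substituting OLD0 = x, NEW0 = a, OLD1 = y and
   NEW1 = b in (x < y) || (x == y) rebuilds and refolds the tree as
   (a < b) || (a == b).  */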
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
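/* For example, folding f () * 0 yields the COMPOUND_EXPR (f (), 0) so
   that the call is still evaluated for its side effects; folding
   x * 0 for a plain variable x yields just 0.  */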
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */
      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;
      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
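/* A couple of concrete cases: invert_truthvalue on a && b builds
   !a || !b (De Morgan), and for an integer comparison the negation of
   x < y is built directly as x >= y; only trapping floating-point
   comparisons get wrapped in a TRUTH_NOT_EXPR instead.  */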
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build2 (TREE_CODE (arg0), type, common,
                       fold (build2 (code, type, left, right))));
}
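/* With constants the saving is visible immediately: (x | 3) & (x | 5)
   becomes x | (3 & 5), which the inner fold reduces to x | 1.  */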
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result = build3 (BIT_FIELD_REF, type, inner,
                        size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
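/* The mask arithmetic above is worth tracing once.  Assuming a 32-bit
   NMODE with lbitsize == 4 and lbitpos == 8 (little-endian), mask
   starts as all ones, is shifted left by 32 - 4 to give 0xf0000000,
   then shifted right by 32 - 4 - 8 to give 0x00000f00: exactly four
   one bits at bit position 8, covering the field.  */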
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
                         fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
  force_fit_type (tmask, 0);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
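/* E.g. for an 8-bit type, width == 8, so lo == 0x80 and mask_lo == 0xff:
   VAL matches exactly when its low byte is 0x80, the sign bit of that
   type.  */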
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                          arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return constant_boolean_node (result, type);
}
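/* The SGN encoding makes the bound comparisons come out right: e.g. an
   omitted upper bound gives sgn1 == 1 while any finite value gives
   sgn0 == 0, so LT_EXPR evaluates 0 < 1, i.e. every finite value
   compares below the missing maximum.  */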
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (first_rtl_op (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == '<'
              || TREE_CODE_CLASS (code) == '1'
              || TREE_CODE_CLASS (code) == '2')
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == '2'
              || TREE_CODE_CLASS (code) == '<'
              || (TREE_CODE_CLASS (code) == 'e'
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              abort ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
                                  1, fold_convert (arg0_type,
                                                   integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (arg0_type, integer_zero_node);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               fold_convert (exp_type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                fold_convert (exp_type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        fold_convert (exp_type, integer_one_node));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert (arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
                                              fold_convert (arg0_type,
                                                            high_positive),
                                              fold_convert (arg0_type,
                                                            integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (arg0_type,
                                                    integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
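/* Tracing one case: for signed X, make_range on X > 10 starts from
   "X != 0", i.e. - [0, 0], and the GT_EXPR arm turns that into
   - [-, 10]: X is outside the range from the lowest value to 10,
   in other words greater than 10.  X itself is returned.  */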
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build2 (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build2 (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build2 (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build2 (GT_EXPR, type, exp,
                               fold_convert (etype, integer_zero_node)));
        }
    }

  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build2 (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
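/* End-to-end, a range check for + [2, 5] on an int X reaches the final
   recursion as X - 2 in + [0, 3], and the zero-low-bound case then
   emits (unsigned) (X - 2) <= 3, the form promised in the overview
   comment above.  */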
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
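/* A small example of the including/including case: merging + [2, 10]
   with + [5, 15] finds the ranges overlapping with neither a subset,
   so the result runs from the start of the second to the end of the
   first: + [5, 10].  */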
4134 /* Subroutine of fold, looking inside expressions of the form
4135 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4136 of the COND_EXPR. This function is being used also to optimize
4137 A op B ? C : A, by reversing the comparison first.
4139 Return a folded expression whose code is not a COND_EXPR
4140 anymore, or NULL_TREE if no folding opportunity is found. */
4143 fold_cond_expr_with_comparison (tree type
, tree arg0
, tree arg1
, tree arg2
)
4145 enum tree_code comp_code
= TREE_CODE (arg0
);
4146 tree arg00
= TREE_OPERAND (arg0
, 0);
4147 tree arg01
= TREE_OPERAND (arg0
, 1);
4148 tree arg1_type
= TREE_TYPE (arg1
);
4154 /* If we have A op 0 ? A : -A, consider applying the following
4157 A == 0? A : -A same as -A
4158 A != 0? A : -A same as A
4159 A >= 0? A : -A same as abs (A)
4160 A > 0? A : -A same as abs (A)
4161 A <= 0? A : -A same as -abs (A)
4162 A < 0? A : -A same as -abs (A)
4164 None of these transformations work for modes with signed
4165 zeros. If A is +/-0, the first two transformations will
4166 change the sign of the result (from +0 to -0, or vice
4167 versa). The last four will fix the sign of the result,
4168 even though the original expressions could be positive or
4169 negative, depending on the sign of A.
4171 Note that all these transformations are correct if A is
4172 NaN, since the two alternatives (A and -A) are also NaNs. */
4173 if ((FLOAT_TYPE_P (TREE_TYPE (arg01
))
4174 ? real_zerop (arg01
)
4175 : integer_zerop (arg01
))
4176 && TREE_CODE (arg2
) == NEGATE_EXPR
4177 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4181 tem
= fold_convert (arg1_type
, arg1
);
4182 return pedantic_non_lvalue (fold_convert (type
, negate_expr (tem
)));
4184 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4187 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4188 arg1
= fold_convert (lang_hooks
.types
.signed_type
4189 (TREE_TYPE (arg1
)), arg1
);
4190 tem
= fold (build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
));
4191 return pedantic_non_lvalue (fold_convert (type
, tem
));
4194 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4195 arg1
= fold_convert (lang_hooks
.types
.signed_type
4196 (TREE_TYPE (arg1
)), arg1
);
4197 tem
= fold (build1 (ABS_EXPR
, TREE_TYPE (arg1
), arg1
));
4198 return negate_expr (fold_convert (type
, tem
));
4203 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4204 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4205 both transformations are correct when A is NaN: A != 0
4206 is then true, and A == 0 is false. */
4208 if (integer_zerop (arg01
) && integer_zerop (arg2
))
4210 if (comp_code
== NE_EXPR
)
4211 return pedantic_non_lvalue (fold_convert (type
, arg1
));
4212 else if (comp_code
== EQ_EXPR
)
4213 return pedantic_non_lvalue (fold_convert (type
, integer_zero_node
));
  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	comp_type = type;

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (
	      fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
						(comp_code == LE_EXPR
						 ? comp_op0 : comp_op1),
						(comp_code == LE_EXPR
						 ? comp_op1 : comp_op0)))));
	  break;
	case GE_EXPR:
	case GT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (
	      fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
						(comp_code == GE_EXPR
						 ? comp_op0 : comp_op1),
						(comp_code == GE_EXPR
						 ? comp_op1 : comp_op0)))));
	  break;
	default:
	  abort ();
	}
    }
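
  /* Editorial illustration, not from the original source: when NaNs
     need not be honored, "a < b ? a : b" becomes MIN_EXPR <a, b> and
     "a > b ? a : b" becomes MAX_EXPR <a, b> via the switch above.  */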
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
						    type, arg1, arg2)));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
						    type, arg1, arg2)));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
						    type, arg1, arg2)));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     integer_one_node, 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
						    type, arg1, arg2)));
	break;

      default:
	break;
      }

  return NULL_TREE;
}
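
/* Editorial illustration, not from the original source: for int X,
   "X < 6 ? X : 5" matches the LT_EXPR case above with C1 == C2 + 1
   and is rewritten as MIN_EXPR <X, 5>.  */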
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		       TREE_OPERAND (exp, 1));

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
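
/* Editorial illustration, not from the original source: for
   "ch >= '0' && ch <= '9'", make_range yields the ranges ['0', +INF]
   and [-INF, '9']; merge_ranges intersects them into ['0', '9'], and
   build_range_check then emits a single unsigned test of the form
   "(unsigned char) (ch - '0') <= 9".  */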
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
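
/* Editorial illustration, not from the original source: with P == 4 and
   an 8-bit mode, unextend maps 0x07 to 0x07 but 0x0F to 0xFF; that is,
   the P-bit constant comes back sign-extended to the full width.  */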
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;
  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);
  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (NE_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build2 (EQ_EXPR, truth_type,
		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			       ll_arg, rl_arg),
		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      return build2 (code, truth_type, lhs, rhs);
    }
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);
  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;
  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }
  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);
      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}
      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2 (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;
  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build2 (TRUTH_ORIF_EXPR, type,
		      optimize_minmax_comparison
		      (build2 (EQ_EXPR, type, arg0, comp_const)),
		      optimize_minmax_comparison
		      (build2 (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build2 (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build2 (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build2 (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build2 (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build2 (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build2 (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;
    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			       fold_convert (ctype, t2)));
	}
      break;
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			     fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;
)
5183 /* If either OP1 or C are negative, this optimization is not safe for
5184 some of the division and remainder types while for others we need
5185 to change the code. */
5186 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5188 if (code
== CEIL_DIV_EXPR
)
5189 code
= FLOOR_DIV_EXPR
;
5190 else if (code
== FLOOR_DIV_EXPR
)
5191 code
= CEIL_DIV_EXPR
;
5192 else if (code
!= MULT_EXPR
5193 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5197 /* If it's a multiply or a division/modulus operation of a multiple
5198 of our constant, do the operation and verify it doesn't overflow. */
5199 if (code
== MULT_EXPR
5200 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
, 0)))
5202 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5203 fold_convert (ctype
, c
), 0);
5204 /* We allow the constant to overflow with wrapping semantics. */
5206 || (TREE_OVERFLOW (op1
) && ! flag_wrapv
))
      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build2 (tcode, ctype,
			     fold (build2 (code, ctype,
					   fold_convert (ctype, op0),
					   fold_convert (ctype, c))),
			     op1));

      break;
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */
    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
			     fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
			     fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
				 fold_convert (ctype,
					       const_binop (TRUNC_DIV_EXPR,
							    op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build2 (code, ctype, fold_convert (ctype, op0),
				 fold_convert (ctype,
					       const_binop (TRUNC_DIV_EXPR,
							    c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (value ? integer_one_node
					     : integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (lhs == 0)
    lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
			     : build2 (code, type, arg, true_value));
  if (rhs == 0)
    rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
			     : build2 (code, type, arg, false_value));

  test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
  return fold_convert (type, test);
}
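
/* Editorial illustration, not from the original source: with a constant
   ARG, "(b ? x : y) + 1" is rewritten as "b ? x + 1 : y + 1", giving
   fold a chance to simplify each arm; the TREE_CONSTANT (arg) guard
   above keeps the transformation from duplicating a non-trivial ARG.  */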
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
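
/* Editorial illustration, not from the original source: "x - 0.0" may
   fold to "x" unless sign-dependent rounding is in effect, but "x + 0.0"
   must not fold when signed zeros are honored, since (-0.0) + 0.0
   evaluates to +0.0, not -0.0.  */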
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;
  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build2 (GE_EXPR, type, arg,
			       build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build2 (EQ_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build2 (code, type, arg,
			       build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build2 (NE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build2 (GE_EXPR, type, arg,
				     build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (NE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build2 (code, type, arg,
				 build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build2 (TRUTH_ANDIF_EXPR, type,
				   fold (build2 (GE_EXPR, type, arg,
						 build_real (TREE_TYPE (arg),
							     dconst0))),
				   fold (build2 (code, type, arg,
						 build_real (TREE_TYPE (arg),
							     c2)))));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build2 (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
			     arg0, build_real (TREE_TYPE (arg0), max)));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
			   arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
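
/* Editorial illustration, not from the original source: with NaNs
   ignored, "x > +Inf" folds to constant 0, "x < +Inf" becomes
   "x <= DBL_MAX", and "x != +Inf" likewise becomes "x <= DBL_MAX".  */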
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_2 (lpart, hpart);
  TREE_TYPE (prod) = TREE_TYPE (arg00);
  TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
			 || TREE_INT_CST_HIGH (prod) != hpart
			 || TREE_INT_CST_LOW (prod) != lpart;
  TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_2 (lpart, hpart);
      TREE_TYPE (hi) = TREE_TYPE (arg00);
      TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
			   || TREE_INT_CST_HIGH (hi) != hpart
			   || TREE_INT_CST_LOW (hi) != lpart
			   || TREE_OVERFLOW (prod);
      TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  abort ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  abort ();
	}
    }
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
	return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
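
/* Editorial illustration, not from the original source: for int x,
   "x / 4 == 2" holds exactly when x is in [8, 11], so the EQ_EXPR case
   above returns the range check "(unsigned) (x - 8) <= 3".  */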
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			       result_type, fold_convert (stype, arg00),
			       fold_convert (stype, integer_zero_node)));
	}

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      if (code == EQ_EXPR)
	inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
			      inner, integer_one_node));

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  return 0;
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  const tree type = TREE_TYPE (expr);
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  int kind = TREE_CODE_CLASS (code);
5989 /* WINS will be nonzero when the switch is done
5990 if all operands are constant. */
5993 /* Return right away if a constant. */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
	STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = first_rtl_op (code);
      int i;
      for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */

	  /* Strip any conversions that don't change the mode.  This is
	     safe for every expression, except for a comparison expression
	     because its signedness is derived from its operands.  So, in
	     the latter case, only strip conversions that don't change the
	     signedness.

	     Note that this is done as an internal manipulation within the
	     constant folder, in order to find the simplest representation
	     of the arguments so that their form can be studied.  In any
	     cases, the appropriate type conversions should be put back in
	     the tree that will get out of the constant folder.  */
	  if (kind == '<')
	    STRIP_SIGN_NOPS (op);
	  else
	    STRIP_NOPS (op);

	  if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;

	  if (TREE_CODE (subop) != INTEGER_CST
	      && TREE_CODE (subop) != REAL_CST)
	    /* Note that TREE_CONSTANT isn't enough:
	       static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build2 (code, type, TREE_OPERAND (t, 1),
			 TREE_OPERAND (t, 0)));
  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			  : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			  : TRUTH_XOR_EXPR,
			  type, fold_convert (boolean_type_node, arg0),
			  fold_convert (boolean_type_node, arg1)));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return tem;
    }
  if (TREE_CODE_CLASS (code) == '1')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold (build1 (code, type, arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold (build1 (code, type, arg02));
	  tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			      arg01, arg02));

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */
	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		    && (INTEGRAL_TYPE_P
			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		    && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
	    tem = build1 (code, type,
			  build3 (COND_EXPR,
				  TREE_TYPE (TREE_OPERAND
					     (TREE_OPERAND (tem, 1), 0)),
				  TREE_OPERAND (tem, 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	  return tem;
	}
      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold (build3 (COND_EXPR, type, arg0,
				 fold (build1 (code, type,
					       integer_one_node)),
				 fold (build1 (code, type,
					       integer_zero_node))));
	}
    }
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		   fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		   fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == '2'
	   || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold (build2 (code, type, TREE_OPERAND (arg0, 1),
				     arg1)));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		       fold (build2 (code, type,
				     arg0, TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
	{
	  tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
	return TREE_OPERAND (t, 0);
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
	}
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  tem = copy_node (t);
	  TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    return fold (build2 (BIT_AND_EXPR, type,
				 fold_convert (type, and0),
				 fold_convert (type, and1)));
	}
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = TREE_TYPE (t);
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}

      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : t;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return t;
    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
	{
	  tem = copy_node (t);
	  TREE_CONSTANT (tem) = wins;
	  TREE_INVARIANT (tem) = wins;
	  return tem;
	}
      return t;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return fold_convert (type, negate_expr (arg0));
      return t;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold (build1 (ABS_EXPR,
						     TREE_TYPE (targ0),
						     targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;
      return t;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build2 (COMPLEX_EXPR, type,
		       TREE_OPERAND (arg0, 0),
		       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
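	  /* Editorial illustration (added commentary, not in the original
	     source): e.g. (x & 0xf0) + (y & 0x0f) never carries between
	     the two summands, so it is handled as
	     (x & 0xf0) | (y & 0x0f), which the BIT_IOR case can simplify
	     further.  */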
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || (TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;

	      if (TREE_CODE (arg0) == PLUS_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build2 (PLUS_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, parg0),
						   fold_convert (type, marg))),
				     fold_convert (type, parg1)));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build2 (PLUS_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, parg1),
						   fold_convert (type, marg))),
				     fold_convert (type, parg0)));
	    }
	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build2 (MULT_EXPR, type, arg00,
					   build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build2 (MULT_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   alt0, alt1)),
				     same));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold (build2 (MINUS_EXPR, type,
				     fold_convert (type, arg0),
				     fold_convert (type, tem)));
	    }

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold (build2 (MULT_EXPR, type, arg0,
				 build_real (type, dconst2)));
	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build2 (MULT_EXPR, type, arg1,
				   build_real (type, c)));
	    }

	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build2 (MULT_EXPR, type, arg0,
				   build_real (type, c)));
	    }

	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold (build2 (MULT_EXPR, type,
				   TREE_OPERAND (arg0, 0),
				   build_real (type, c1)));
	    }
	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
		  return fold (build2 (PLUS_EXPR, type, tree0, tree11));
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
		  return fold (build2 (PLUS_EXPR, type, tree00, tree0));
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);
	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}
= const_binop (code
, arg0
, arg1
, 0);
6866 if (t1
!= NULL_TREE
)
6868 /* The return value should always have
6869 the same type as the original expression. */
6870 if (TREE_TYPE (t1
) != type
)
6871 t1
= fold_convert (type
, t1
);
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
			     TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 0))),
				     arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 1))),
				     arg0));
	    }
	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build2 (BIT_XOR_EXPR, type,
				      TREE_OPERAND (arg0, 0), mask1));
		  return fold (build2 (MINUS_EXPR, type, tem, mask1));
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
      if (TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR
	  && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  /* (A * C) - (B * C) -> (A-B) * C.  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
			       TREE_OPERAND (arg1, 1), 0))
	    return fold (build2 (MULT_EXPR, type,
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 0),
					       TREE_OPERAND (arg1, 0))),
				 TREE_OPERAND (arg0, 1)));
	  /* (A * C1) - (A * C2) -> A * (C1-C2).  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0))
	    return fold (build2 (MULT_EXPR, type,
				 TREE_OPERAND (arg0, 0),
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 1),
					       TREE_OPERAND (arg1, 1)))));
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (MULT_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (MULT_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg0,
				 TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg1,
				 TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build2 (RDIV_EXPR, type, tem,
				     TREE_OPERAND (arg0, 1)));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build2 (PLUS_EXPR, type,
				     TREE_VALUE (TREE_OPERAND (arg0, 1)),
				     TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_2 (-1, -1);
	  TREE_TYPE (t1) = type;
	  force_fit_type (t1, 0);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_2 (-1, -1);
	  TREE_TYPE (t1) = type;
	  force_fit_type (t1, 0);
	  return omit_one_operand (type, t1, arg0);
	}

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_AND_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_int_2 (-1, -1);
	  TREE_TYPE (t1) = type;
	  force_fit_type (t1, 0);
	  return omit_one_operand (type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_int_2 (-1, -1);
	  TREE_TYPE (t1) = type;
	  force_fit_type (t1, 0);
	  return omit_one_operand (type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_IOR_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (RDIV_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (RDIV_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build2 (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build2 (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			     fold (build2 (MULT_EXPR, type,
					   TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build2 (MULT_EXPR, type,
			     fold (build2 (RDIV_EXPR, type, arg0,
					   TREE_OPERAND (arg1, 0))),
			     TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build2 (RDIV_EXPR, type, tem,
				 TREE_OPERAND (arg1, 0)));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build2 (RDIV_EXPR, type,
				       build_real (type, dconst1), tmp));
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
:
7534 case ROUND_DIV_EXPR
:
7535 case FLOOR_DIV_EXPR
:
7537 case EXACT_DIV_EXPR
:
7538 if (integer_onep (arg1
))
7539 return non_lvalue (fold_convert (type
, arg0
));
7540 if (integer_zerop (arg1
))
7543 if (!TYPE_UNSIGNED (type
)
7544 && TREE_CODE (arg1
) == INTEGER_CST
7545 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
7546 && TREE_INT_CST_HIGH (arg1
) == -1)
7547 return fold_convert (type
, negate_expr (arg0
));
7549 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7550 operation, EXACT_DIV_EXPR.
7552 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7553 At one time others generated faster code, it's not clear if they do
7554 after the last round to changes to the DIV code in expmed.c. */
7555 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
7556 && multiple_of_p (type
, arg0
, arg1
))
7557 return fold (build2 (EXACT_DIV_EXPR
, type
, arg0
, arg1
));
7559 if (TREE_CODE (arg1
) == INTEGER_CST
7560 && 0 != (tem
= extract_muldiv (TREE_OPERAND (t
, 0), arg1
,
7562 return fold_convert (type
, tem
);
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
	 BIT_AND_EXPR, i.e. "X % C" into "X & C2".  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_UNSIGNED (type)
	  && integer_pow2p (arg1))
	{
	  unsigned HOST_WIDE_INT high, low;
	  tree mask;
	  int l;

	  l = tree_log2 (arg1);
	  if (l >= HOST_BITS_PER_WIDE_INT)
	    {
	      high = ((unsigned HOST_WIDE_INT) 1
		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
	      low = -1;
	    }
	  else
	    {
	      high = 0;
	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
	    }

	  mask = build_int_2 (low, high);
	  TREE_TYPE (mask) = type;
	  return fold (build2 (BIT_AND_EXPR, type,
			       fold_convert (type, arg0), mask));
	}
      /* X % -C is the same as X % C (for all rounding moduli).  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !flag_trapv
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold (build2 (code, type, fold_convert (type, arg0),
			     fold_convert (type, negate_expr (arg1))));

      /* X % -Y is the same as X % Y (for all rounding moduli).  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !flag_trapv)
	return fold (build2 (code, type, fold_convert (type, arg0),
			     fold_convert (type, TREE_OPERAND (arg1, 0))));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold (build2 (RROTATE_EXPR, type, arg0, tem));
	}
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build2 (code, type,
					   TREE_OPERAND (arg0, 0), arg1)),
			     fold (build2 (code, type,
					   TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	{
	  tem = fold_single_bit_test (code, arg0, arg1, type);
	  if (!tem)
	    return t;
	  return tem;
	}
      return fold_convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand (type, integer_zero_node, arg0);
7767 /* We only do these simplifications if we are optimizing. */
7771 /* Check for things like (A || B) && (A || C). We can convert this
7772 to A || (B && C). Note that either operator can be any of the four
7773 truth and/or operations and the transformation will still be
7774 valid. Also note that we only care about order for the
7775 ANDIF and ORIF operators. If B contains side effects, this
7776 might change the truth-value of A. */
7777 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
7778 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
7779 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
7780 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
7781 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
7782 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
7784 tree a00
= TREE_OPERAND (arg0
, 0);
7785 tree a01
= TREE_OPERAND (arg0
, 1);
7786 tree a10
= TREE_OPERAND (arg1
, 0);
7787 tree a11
= TREE_OPERAND (arg1
, 1);
7788 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
7789 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
7790 && (code
== TRUTH_AND_EXPR
7791 || code
== TRUTH_OR_EXPR
));
7793 if (operand_equal_p (a00
, a10
, 0))
7794 return fold (build2 (TREE_CODE (arg0
), type
, a00
,
7795 fold (build2 (code
, type
, a01
, a11
))));
7796 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
7797 return fold (build2 (TREE_CODE (arg0
), type
, a00
,
7798 fold (build2 (code
, type
, a01
, a10
))));
7799 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
7800 return fold (build2 (TREE_CODE (arg0
), type
, a01
,
7801 fold (build2 (code
, type
, a00
, a11
))));
7803 /* This case if tricky because we must either have commutative
7804 operators or else A10 must not have side-effects. */
7806 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
7807 && operand_equal_p (a01
, a11
, 0))
7808 return fold (build2 (TREE_CODE (arg0
), type
,
7809 fold (build2 (code
, type
, a00
, a10
)),
7813 /* See if we can build a range comparison. */
7814 if (0 != (tem
= fold_range_test (t
)))
7817 /* Check for the possibility of merging component references. If our
7818 lhs is another similar operation, try to merge its rhs with our
7819 rhs. Then try to merge our lhs and rhs. */
7820 if (TREE_CODE (arg0
) == code
7821 && 0 != (tem
= fold_truthop (code
, type
,
7822 TREE_OPERAND (arg0
, 1), arg1
)))
7823 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
7825 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
7830 case TRUTH_ORIF_EXPR
:
7831 /* Note that the operands of this must be ints
7832 and their values must be 0 or true.
7833 ("true" is a fixed value perhaps depending on the language.) */
7834 /* If first arg is constant true, return it. */
7835 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
7836 return fold_convert (type
, arg0
);
7838 /* If either arg is constant zero, drop it. */
7839 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
7840 return non_lvalue (fold_convert (type
, arg1
));
7841 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
7842 /* Preserve sequence points. */
7843 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
7844 return non_lvalue (fold_convert (type
, arg0
));
7845 /* If second arg is constant true, result is true, but we must
7846 evaluate first arg. */
7847 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
7848 return omit_one_operand (type
, arg1
, arg0
);
7849 /* Likewise for first arg, but note this only occurs here for
7851 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
7852 return omit_one_operand (type
, arg0
, arg1
);
7854 /* !X || X is always true. */
7855 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
7856 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
7857 return omit_one_operand (type
, integer_one_node
, arg1
);
7858 /* X || !X is always true. */
7859 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
7860 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
7861 return omit_one_operand (type
, integer_one_node
, arg0
);
7865 case TRUTH_XOR_EXPR
:
7866 /* If the second arg is constant zero, drop it. */
7867 if (integer_zerop (arg1
))
7868 return non_lvalue (fold_convert (type
, arg0
));
7869 /* If the second arg is constant true, this is a logical inversion. */
7870 if (integer_onep (arg1
))
7871 return non_lvalue (fold_convert (type
, invert_truthvalue (arg0
)));
7872 /* Identical arguments cancel to zero. */
7873 if (operand_equal_p (arg0
, arg1
, 0))
7874 return omit_one_operand (type
, integer_zero_node
, arg0
);
7876 /* !X ^ X is always true. */
7877 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
7878 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
7879 return omit_one_operand (type
, integer_one_node
, arg1
);
7881 /* X ^ !X is always true. */
7882 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
7883 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
7884 return omit_one_operand (type
, integer_one_node
, arg0
);
7894 /* If one arg is a real or integer constant, put it last. */
7895 if (tree_swap_operands_p (arg0
, arg1
, true))
7896 return fold (build2 (swap_tree_comparison (code
), type
, arg1
, arg0
));
7898 /* If this is an equality comparison of the address of a non-weak
7899 object against zero, then we know the result. */
7900 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
7901 && TREE_CODE (arg0
) == ADDR_EXPR
7902 && DECL_P (TREE_OPERAND (arg0
, 0))
7903 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
7904 && integer_zerop (arg1
))
7905 return constant_boolean_node (code
!= EQ_EXPR
, type
);
7907 /* If this is an equality comparison of the address of two non-weak,
7908 unaliased symbols neither of which are extern (since we do not
7909 have access to attributes for externs), then we know the result. */
7910 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
7911 && TREE_CODE (arg0
) == ADDR_EXPR
7912 && DECL_P (TREE_OPERAND (arg0
, 0))
7913 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
7914 && ! lookup_attribute ("alias",
7915 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
7916 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
7917 && TREE_CODE (arg1
) == ADDR_EXPR
7918 && DECL_P (TREE_OPERAND (arg1
, 0))
7919 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
7920 && ! lookup_attribute ("alias",
7921 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
7922 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
7923 return constant_boolean_node (operand_equal_p (arg0
, arg1
, 0)
7924 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
7927 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
7929 tree targ0
= strip_float_extensions (arg0
);
7930 tree targ1
= strip_float_extensions (arg1
);
7931 tree newtype
= TREE_TYPE (targ0
);
7933 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
7934 newtype
= TREE_TYPE (targ1
);
7936 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7937 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
7938 return fold (build2 (code
, type
, fold_convert (newtype
, targ0
),
7939 fold_convert (newtype
, targ1
)));
7941 /* (-a) CMP (-b) -> b CMP a */
7942 if (TREE_CODE (arg0
) == NEGATE_EXPR
7943 && TREE_CODE (arg1
) == NEGATE_EXPR
)
7944 return fold (build2 (code
, type
, TREE_OPERAND (arg1
, 0),
7945 TREE_OPERAND (arg0
, 0)));
7947 if (TREE_CODE (arg1
) == REAL_CST
)
7949 REAL_VALUE_TYPE cst
;
7950 cst
= TREE_REAL_CST (arg1
);
7952 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7953 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
7955 fold (build2 (swap_tree_comparison (code
), type
,
7956 TREE_OPERAND (arg0
, 0),
7957 build_real (TREE_TYPE (arg1
),
7958 REAL_VALUE_NEGATE (cst
))));
7960 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7961 /* a CMP (-0) -> a CMP 0 */
7962 if (REAL_VALUE_MINUS_ZERO (cst
))
7963 return fold (build2 (code
, type
, arg0
,
7964 build_real (TREE_TYPE (arg1
), dconst0
)));
7966 /* x != NaN is always true, other ops are always false. */
7967 if (REAL_VALUE_ISNAN (cst
)
7968 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
7970 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
7971 return omit_one_operand (type
, tem
, arg0
);
7974 /* Fold comparisons against infinity. */
7975 if (REAL_VALUE_ISINF (cst
))
7977 tem
= fold_inf_compare (code
, type
, arg0
, arg1
);
7978 if (tem
!= NULL_TREE
)
7983 /* If this is a comparison of a real constant with a PLUS_EXPR
7984 or a MINUS_EXPR of a real constant, we can convert it into a
7985 comparison with a revised real constant as long as no overflow
7986 occurs when unsafe_math_optimizations are enabled. */
7987 if (flag_unsafe_math_optimizations
7988 && TREE_CODE (arg1
) == REAL_CST
7989 && (TREE_CODE (arg0
) == PLUS_EXPR
7990 || TREE_CODE (arg0
) == MINUS_EXPR
)
7991 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
7992 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
7993 ? MINUS_EXPR
: PLUS_EXPR
,
7994 arg1
, TREE_OPERAND (arg0
, 1), 0))
7995 && ! TREE_CONSTANT_OVERFLOW (tem
))
7996 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
7998 /* Likewise, we can simplify a comparison of a real constant with
7999 a MINUS_EXPR whose first operand is also a real constant, i.e.
8000 (c1 - x) < c2 becomes x > c1-c2. */
8001 if (flag_unsafe_math_optimizations
8002 && TREE_CODE (arg1
) == REAL_CST
8003 && TREE_CODE (arg0
) == MINUS_EXPR
8004 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
8005 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
8007 && ! TREE_CONSTANT_OVERFLOW (tem
))
8008 return fold (build2 (swap_tree_comparison (code
), type
,
8009 TREE_OPERAND (arg0
, 1), tem
));
8011 /* Fold comparisons against built-in math functions. */
8012 if (TREE_CODE (arg1
) == REAL_CST
8013 && flag_unsafe_math_optimizations
8014 && ! flag_errno_math
)
8016 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8018 if (fcode
!= END_BUILTINS
)
8020 tem
= fold_mathfn_compare (fcode
, code
, type
, arg0
, arg1
);
8021 if (tem
!= NULL_TREE
)
8027 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8028 if (TREE_CONSTANT (arg1
)
8029 && (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
8030 || TREE_CODE (arg0
) == POSTDECREMENT_EXPR
)
8031 /* This optimization is invalid for ordered comparisons
8032 if CONST+INCR overflows or if foo+incr might overflow.
8033 This optimization is invalid for floating point due to rounding.
8034 For pointer types we assume overflow doesn't happen. */
8035 && (POINTER_TYPE_P (TREE_TYPE (arg0
))
8036 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8037 && (code
== EQ_EXPR
|| code
== NE_EXPR
))))
8039 tree varop
, newconst
;
8041 if (TREE_CODE (arg0
) == POSTINCREMENT_EXPR
)
8043 newconst
= fold (build2 (PLUS_EXPR
, TREE_TYPE (arg0
),
8044 arg1
, TREE_OPERAND (arg0
, 1)));
8045 varop
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (arg0
),
8046 TREE_OPERAND (arg0
, 0),
8047 TREE_OPERAND (arg0
, 1));
8051 newconst
= fold (build2 (MINUS_EXPR
, TREE_TYPE (arg0
),
8052 arg1
, TREE_OPERAND (arg0
, 1)));
8053 varop
= build2 (PREDECREMENT_EXPR
, TREE_TYPE (arg0
),
8054 TREE_OPERAND (arg0
, 0),
8055 TREE_OPERAND (arg0
, 1));
8059 /* If VAROP is a reference to a bitfield, we must mask
8060 the constant by the width of the field. */
8061 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
8062 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop
, 0), 1))
8063 && host_integerp (DECL_SIZE (TREE_OPERAND
8064 (TREE_OPERAND (varop
, 0), 1)), 1))
8066 tree fielddecl
= TREE_OPERAND (TREE_OPERAND (varop
, 0), 1);
8067 HOST_WIDE_INT size
= tree_low_cst (DECL_SIZE (fielddecl
), 1);
8068 tree folded_compare
, shift
;
8070 /* First check whether the comparison would come out
8071 always the same. If we don't do that we would
8072 change the meaning with the masking. */
8073 folded_compare
= fold (build2 (code
, type
,
8074 TREE_OPERAND (varop
, 0), arg1
));
8075 if (integer_zerop (folded_compare
)
8076 || integer_onep (folded_compare
))
8077 return omit_one_operand (type
, folded_compare
, varop
);
8079 shift
= build_int_2 (TYPE_PRECISION (TREE_TYPE (varop
)) - size
,
8081 shift
= fold_convert (TREE_TYPE (varop
), shift
);
8082 newconst
= fold (build2 (LSHIFT_EXPR
, TREE_TYPE (varop
),
8084 newconst
= fold (build2 (RSHIFT_EXPR
, TREE_TYPE (varop
),
8088 return fold (build2 (code
, type
, varop
, newconst
));
8091 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8092 This transformation affects the cases which are handled in later
8093 optimizations involving comparisons with non-negative constants. */
8094 if (TREE_CODE (arg1
) == INTEGER_CST
8095 && TREE_CODE (arg0
) != INTEGER_CST
8096 && tree_int_cst_sgn (arg1
) > 0)
8101 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8102 return fold (build2 (GT_EXPR
, type
, arg0
, arg1
));
8105 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8106 return fold (build2 (LE_EXPR
, type
, arg0
, arg1
));
8113 /* Comparisons with the highest or lowest possible integer of
8114 the specified size will have known values.
8116 This is quite similar to fold_relational_hi_lo; however, my
8117 attempts to share the code have been nothing but trouble.
8118 I give up for now. */
8120 int width
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1
)));
8122 if (TREE_CODE (arg1
) == INTEGER_CST
8123 && ! TREE_CONSTANT_OVERFLOW (arg1
)
8124 && width
<= HOST_BITS_PER_WIDE_INT
8125 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
8126 || POINTER_TYPE_P (TREE_TYPE (arg1
))))
8128 unsigned HOST_WIDE_INT signed_max
;
8129 unsigned HOST_WIDE_INT max
, min
;
8131 signed_max
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1)) - 1;
8133 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
8135 max
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
8141 min
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
8144 if (TREE_INT_CST_HIGH (arg1
) == 0
8145 && TREE_INT_CST_LOW (arg1
) == max
)
8149 return omit_one_operand (type
, integer_zero_node
, arg0
);
8152 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8155 return omit_one_operand (type
, integer_one_node
, arg0
);
8158 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8160 /* The GE_EXPR and LT_EXPR cases above are not normally
8161 reached because of previous transformations. */
8166 else if (TREE_INT_CST_HIGH (arg1
) == 0
8167 && TREE_INT_CST_LOW (arg1
) == max
- 1)
8171 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
8172 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8174 arg1
= const_binop (PLUS_EXPR
, arg1
, integer_one_node
, 0);
8175 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8179 else if (TREE_INT_CST_HIGH (arg1
) == (min
? -1 : 0)
8180 && TREE_INT_CST_LOW (arg1
) == min
)
8184 return omit_one_operand (type
, integer_zero_node
, arg0
);
8187 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8190 return omit_one_operand (type
, integer_one_node
, arg0
);
8193 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8198 else if (TREE_INT_CST_HIGH (arg1
) == (min
? -1 : 0)
8199 && TREE_INT_CST_LOW (arg1
) == min
+ 1)
8203 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8204 return fold (build2 (NE_EXPR
, type
, arg0
, arg1
));
8206 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
8207 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8212 else if (!in_gimple_form
8213 && TREE_INT_CST_HIGH (arg1
) == 0
8214 && TREE_INT_CST_LOW (arg1
) == signed_max
8215 && TYPE_UNSIGNED (TREE_TYPE (arg1
))
8216 /* signed_type does not work on pointer types. */
8217 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
8219 /* The following case also applies to X < signed_max+1
8220 and X >= signed_max+1 because previous transformations. */
8221 if (code
== LE_EXPR
|| code
== GT_EXPR
)
8224 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
8225 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
8227 (build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
8228 type
, fold_convert (st0
, arg0
),
8229 fold_convert (st1
, integer_zero_node
)));
8235 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8236 a MINUS_EXPR of a constant, we can convert it into a comparison with
8237 a revised constant as long as no overflow occurs. */
8238 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8239 && TREE_CODE (arg1
) == INTEGER_CST
8240 && (TREE_CODE (arg0
) == PLUS_EXPR
8241 || TREE_CODE (arg0
) == MINUS_EXPR
)
8242 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8243 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
8244 ? MINUS_EXPR
: PLUS_EXPR
,
8245 arg1
, TREE_OPERAND (arg0
, 1), 0))
8246 && ! TREE_CONSTANT_OVERFLOW (tem
))
8247 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
8249 /* Similarly for a NEGATE_EXPR. */
8250 else if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8251 && TREE_CODE (arg0
) == NEGATE_EXPR
8252 && TREE_CODE (arg1
) == INTEGER_CST
8253 && 0 != (tem
= negate_expr (arg1
))
8254 && TREE_CODE (tem
) == INTEGER_CST
8255 && ! TREE_CONSTANT_OVERFLOW (tem
))
8256 return fold (build2 (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
8258 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8259 for !=. Don't do this for ordered comparisons due to overflow. */
8260 else if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
8261 && integer_zerop (arg1
) && TREE_CODE (arg0
) == MINUS_EXPR
)
8262 return fold (build2 (code
, type
,
8263 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1)));
8265 /* If we are widening one operand of an integer comparison,
8266 see if the other operand is similarly being widened. Perhaps we
8267 can do the comparison in the narrower type. */
8268 else if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
8269 && TREE_CODE (arg0
) == NOP_EXPR
8270 && (tem
= get_unwidened (arg0
, NULL_TREE
)) != arg0
8271 && (code
== EQ_EXPR
|| code
== NE_EXPR
8272 || TYPE_UNSIGNED (TREE_TYPE (arg0
))
8273 == TYPE_UNSIGNED (TREE_TYPE (tem
)))
8274 && (t1
= get_unwidened (arg1
, TREE_TYPE (tem
))) != 0
8275 && (TREE_TYPE (t1
) == TREE_TYPE (tem
)
8276 || (TREE_CODE (t1
) == INTEGER_CST
8277 && int_fits_type_p (t1
, TREE_TYPE (tem
)))))
8278 return fold (build2 (code
, type
, tem
,
8279 fold_convert (TREE_TYPE (tem
), t1
)));
8281 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8282 constant, we can simplify it. */
8283 else if (TREE_CODE (arg1
) == INTEGER_CST
8284 && (TREE_CODE (arg0
) == MIN_EXPR
8285 || TREE_CODE (arg0
) == MAX_EXPR
)
8286 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8287 return optimize_minmax_comparison (t
);
8289 /* If we are comparing an ABS_EXPR with a constant, we can
8290 convert all the cases into explicit comparisons, but they may
8291 well not be faster than doing the ABS and one comparison.
8292 But ABS (X) <= C is a range comparison, which becomes a subtraction
8293 and a comparison, and is probably faster. */
8294 else if (code
== LE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
8295 && TREE_CODE (arg0
) == ABS_EXPR
8296 && ! TREE_SIDE_EFFECTS (arg0
)
8297 && (0 != (tem
= negate_expr (arg1
)))
8298 && TREE_CODE (tem
) == INTEGER_CST
8299 && ! TREE_CONSTANT_OVERFLOW (tem
))
8300 return fold (build2 (TRUTH_ANDIF_EXPR
, type
,
8301 build2 (GE_EXPR
, type
,
8302 TREE_OPERAND (arg0
, 0), tem
),
8303 build2 (LE_EXPR
, type
,
8304 TREE_OPERAND (arg0
, 0), arg1
)));
8306 /* If this is an EQ or NE comparison with zero and ARG0 is
8307 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8308 two operations, but the latter can be done in one less insn
8309 on machines that have only two-operand insns or on which a
8310 constant cannot be the first operand. */
8311 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
8312 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
8314 tree arg00
= TREE_OPERAND (arg0
, 0);
8315 tree arg01
= TREE_OPERAND (arg0
, 1);
8316 if (TREE_CODE (arg00
) == LSHIFT_EXPR
8317 && integer_onep (TREE_OPERAND (arg00
, 0)))
8319 fold (build2 (code
, type
,
8320 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8321 build2 (RSHIFT_EXPR
, TREE_TYPE (arg00
),
8322 arg01
, TREE_OPERAND (arg00
, 1)),
8323 fold_convert (TREE_TYPE (arg0
),
8326 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
8327 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
8329 fold (build2 (code
, type
,
8330 build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8331 build2 (RSHIFT_EXPR
, TREE_TYPE (arg01
),
8332 arg00
, TREE_OPERAND (arg01
, 1)),
8333 fold_convert (TREE_TYPE (arg0
),
8338 /* If this is an NE or EQ comparison of zero against the result of a
8339 signed MOD operation whose second operand is a power of 2, make
8340 the MOD operation unsigned since it is simpler and equivalent. */
8341 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
8342 && integer_zerop (arg1
)
8343 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
8344 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
8345 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
8346 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
8347 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
8348 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
8350 tree newtype
= lang_hooks
.types
.unsigned_type (TREE_TYPE (arg0
));
8351 tree newmod
= fold (build2 (TREE_CODE (arg0
), newtype
,
8352 fold_convert (newtype
,
8353 TREE_OPERAND (arg0
, 0)),
8354 fold_convert (newtype
,
8355 TREE_OPERAND (arg0
, 1))));
8357 return fold (build2 (code
, type
, newmod
,
8358 fold_convert (newtype
, arg1
)));
8361 /* If this is an NE comparison of zero with an AND of one, remove the
8362 comparison since the AND will give the correct value. */
8363 if (code
== NE_EXPR
&& integer_zerop (arg1
)
8364 && TREE_CODE (arg0
) == BIT_AND_EXPR
8365 && integer_onep (TREE_OPERAND (arg0
, 1)))
8366 return fold_convert (type
, arg0
);
8368 /* If we have (A & C) == C where C is a power of 2, convert this into
8369 (A & C) != 0. Similarly for NE_EXPR. */
8370 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8371 && TREE_CODE (arg0
) == BIT_AND_EXPR
8372 && integer_pow2p (TREE_OPERAND (arg0
, 1))
8373 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
8374 return fold (build2 (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
8375 arg0
, integer_zero_node
));
8377 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8378 2, then fold the expression into shifts and logical operations. */
8379 tem
= fold_single_bit_test (code
, arg0
, arg1
, type
);
8383 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8384 Similarly for NE_EXPR. */
8385 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8386 && TREE_CODE (arg0
) == BIT_AND_EXPR
8387 && TREE_CODE (arg1
) == INTEGER_CST
8388 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8391 = fold (build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8392 arg1
, build1 (BIT_NOT_EXPR
,
8393 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
8394 TREE_OPERAND (arg0
, 1))));
8395 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
8396 if (integer_nonzerop (dandnotc
))
8397 return omit_one_operand (type
, rslt
, arg0
);
8400 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8401 Similarly for NE_EXPR. */
8402 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8403 && TREE_CODE (arg0
) == BIT_IOR_EXPR
8404 && TREE_CODE (arg1
) == INTEGER_CST
8405 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8408 = fold (build2 (BIT_AND_EXPR
, TREE_TYPE (arg0
),
8409 TREE_OPERAND (arg0
, 1),
8410 build1 (BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
)));
8411 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
8412 if (integer_nonzerop (candnotd
))
8413 return omit_one_operand (type
, rslt
, arg0
);
8416 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8417 and similarly for >= into !=. */
8418 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
8419 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
8420 && TREE_CODE (arg1
) == LSHIFT_EXPR
8421 && integer_onep (TREE_OPERAND (arg1
, 0)))
8422 return build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
8423 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
8424 TREE_OPERAND (arg1
, 1)),
8425 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
8427 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
8428 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
8429 && (TREE_CODE (arg1
) == NOP_EXPR
8430 || TREE_CODE (arg1
) == CONVERT_EXPR
)
8431 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
8432 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
8434 build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
8435 fold_convert (TREE_TYPE (arg0
),
8436 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
8437 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
8439 fold_convert (TREE_TYPE (arg0
), integer_zero_node
));
8441 /* Simplify comparison of something with itself. (For IEEE
8442 floating-point, we can only do some of these simplifications.) */
8443 if (operand_equal_p (arg0
, arg1
, 0))
8448 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8449 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8450 return constant_boolean_node (1, type
);
8455 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
8456 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8457 return constant_boolean_node (1, type
);
8458 return fold (build2 (EQ_EXPR
, type
, arg0
, arg1
));
8461 /* For NE, we can only do this simplification if integer
8462 or we don't honor IEEE floating point NaNs. */
8463 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
8464 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
8466 /* ... fall through ... */
8469 return constant_boolean_node (0, type
);
8475 /* If we are comparing an expression that just has comparisons
8476 of two integer values, arithmetic expressions of those comparisons,
8477 and constants, we can simplify it. There are only three cases
8478 to check: the two values can either be equal, the first can be
8479 greater, or the second can be greater. Fold the expression for
8480 those three values. Since each value must be 0 or 1, we have
8481 eight possibilities, each of which corresponds to the constant 0
8482 or 1 or one of the six possible comparisons.
8484 This handles common cases like (a > b) == 0 but also handles
8485 expressions like ((x > y) - (y > x)) > 0, which supposedly
8486 occur in macroized code. */
8488 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8490 tree cval1
= 0, cval2
= 0;
8493 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8494 /* Don't handle degenerate cases here; they should already
8495 have been handled anyway. */
8496 && cval1
!= 0 && cval2
!= 0
8497 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8498 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8499 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8500 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8501 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8502 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8503 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8505 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8506 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8508 /* We can't just pass T to eval_subst in case cval1 or cval2
8509 was the same as ARG1. */
8512 = fold (build2 (code
, type
,
8513 eval_subst (arg0
, cval1
, maxval
,
8517 = fold (build2 (code
, type
,
8518 eval_subst (arg0
, cval1
, maxval
,
8522 = fold (build2 (code
, type
,
8523 eval_subst (arg0
, cval1
, minval
,
8527 /* All three of these results should be 0 or 1. Confirm they
8528 are. Then use those values to select the proper code
8531 if ((integer_zerop (high_result
)
8532 || integer_onep (high_result
))
8533 && (integer_zerop (equal_result
)
8534 || integer_onep (equal_result
))
8535 && (integer_zerop (low_result
)
8536 || integer_onep (low_result
)))
8538 /* Make a 3-bit mask with the high-order bit being the
8539 value for `>', the next for '=', and the low for '<'. */
8540 switch ((integer_onep (high_result
) * 4)
8541 + (integer_onep (equal_result
) * 2)
8542 + integer_onep (low_result
))
8546 return omit_one_operand (type
, integer_zero_node
, arg0
);
8567 return omit_one_operand (type
, integer_one_node
, arg0
);
8570 tem
= build2 (code
, type
, cval1
, cval2
);
8572 return save_expr (tem
);
8579 /* If this is a comparison of a field, we may be able to simplify it. */
8580 if (((TREE_CODE (arg0
) == COMPONENT_REF
8581 && lang_hooks
.can_use_bit_fields_p ())
8582 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
8583 && (code
== EQ_EXPR
|| code
== NE_EXPR
)
8584 /* Handle the constant case even without -O
8585 to make sure the warnings are given. */
8586 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
8588 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
8593 /* If this is a comparison of complex values and either or both sides
8594 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8595 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8596 This may prevent needless evaluations. */
8597 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8598 && TREE_CODE (TREE_TYPE (arg0
)) == COMPLEX_TYPE
8599 && (TREE_CODE (arg0
) == COMPLEX_EXPR
8600 || TREE_CODE (arg1
) == COMPLEX_EXPR
8601 || TREE_CODE (arg0
) == COMPLEX_CST
8602 || TREE_CODE (arg1
) == COMPLEX_CST
))
8604 tree subtype
= TREE_TYPE (TREE_TYPE (arg0
));
8605 tree real0
, imag0
, real1
, imag1
;
8607 arg0
= save_expr (arg0
);
8608 arg1
= save_expr (arg1
);
8609 real0
= fold (build1 (REALPART_EXPR
, subtype
, arg0
));
8610 imag0
= fold (build1 (IMAGPART_EXPR
, subtype
, arg0
));
8611 real1
= fold (build1 (REALPART_EXPR
, subtype
, arg1
));
8612 imag1
= fold (build1 (IMAGPART_EXPR
, subtype
, arg1
));
8614 return fold (build2 ((code
== EQ_EXPR
? TRUTH_ANDIF_EXPR
8617 fold (build2 (code
, type
, real0
, real1
)),
8618 fold (build2 (code
, type
, imag0
, imag1
))));
8621 /* Optimize comparisons of strlen vs zero to a compare of the
8622 first character of the string vs zero. To wit,
8623 strlen(ptr) == 0 => *ptr == 0
8624 strlen(ptr) != 0 => *ptr != 0
8625 Other cases should reduce to one of these two (or a constant)
8626 due to the return value of strlen being unsigned. */
8627 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8628 && integer_zerop (arg1
)
8629 && TREE_CODE (arg0
) == CALL_EXPR
)
8631 tree fndecl
= get_callee_fndecl (arg0
);
8635 && DECL_BUILT_IN (fndecl
)
8636 && DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_MD
8637 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
8638 && (arglist
= TREE_OPERAND (arg0
, 1))
8639 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
8640 && ! TREE_CHAIN (arglist
))
8641 return fold (build2 (code
, type
,
8642 build1 (INDIRECT_REF
, char_type_node
,
8643 TREE_VALUE(arglist
)),
8644 integer_zero_node
));
8647 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8648 into a single range test. */
8649 if (TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8650 && TREE_CODE (arg1
) == INTEGER_CST
8651 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8652 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8653 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8654 && !TREE_OVERFLOW (arg1
))
8656 t1
= fold_div_compare (code
, type
, arg0
, arg1
);
8657 if (t1
!= NULL_TREE
)
8661 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8662 && !TREE_SIDE_EFFECTS (arg0
)
8663 && integer_zerop (arg1
)
8664 && tree_expr_nonzero_p (arg0
))
8665 return constant_boolean_node (code
==NE_EXPR
, type
);
8667 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
8668 return t1
== NULL_TREE
? t
: t1
;
8670 case UNORDERED_EXPR
:
8678 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
8680 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
8681 if (t1
!= NULL_TREE
)
8685 /* If the first operand is NaN, the result is constant. */
8686 if (TREE_CODE (arg0
) == REAL_CST
8687 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
8688 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
8690 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
8693 return omit_one_operand (type
, t1
, arg1
);
8696 /* If the second operand is NaN, the result is constant. */
8697 if (TREE_CODE (arg1
) == REAL_CST
8698 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
8699 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
8701 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
8704 return omit_one_operand (type
, t1
, arg0
);
8707 /* Simplify unordered comparison of something with itself. */
8708 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
8709 && operand_equal_p (arg0
, arg1
, 0))
8710 return constant_boolean_node (1, type
);
8712 if (code
== LTGT_EXPR
8713 && !flag_trapping_math
8714 && operand_equal_p (arg0
, arg1
, 0))
8715 return constant_boolean_node (0, type
);
8717 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8719 tree targ0
= strip_float_extensions (arg0
);
8720 tree targ1
= strip_float_extensions (arg1
);
8721 tree newtype
= TREE_TYPE (targ0
);
8723 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
8724 newtype
= TREE_TYPE (targ1
);
8726 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
8727 return fold (build2 (code
, type
, fold_convert (newtype
, targ0
),
8728 fold_convert (newtype
, targ1
)));
8734 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8735 so all simple results must be passed through pedantic_non_lvalue. */
8736 if (TREE_CODE (arg0
) == INTEGER_CST
)
8738 tem
= TREE_OPERAND (t
, (integer_zerop (arg0
) ? 2 : 1));
8739 /* Only optimize constant conditions when the selected branch
8740 has the same type as the COND_EXPR. This avoids optimizing
8741 away "c ? x : throw", where the throw has a void type. */
8742 if (! VOID_TYPE_P (TREE_TYPE (tem
))
8743 || VOID_TYPE_P (type
))
8744 return pedantic_non_lvalue (tem
);
8747 if (operand_equal_p (arg1
, TREE_OPERAND (t
, 2), 0))
8748 return pedantic_omit_one_operand (type
, arg1
, arg0
);
8750 /* If we have A op B ? A : C, we may be able to convert this to a
8751 simpler expression, depending on the operation and the values
8752 of B and C. Signed zeros prevent all of these transformations,
8753 for reasons given above each one.
8755 Also try swapping the arguments and inverting the conditional. */
8756 if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
8757 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
8758 arg1
, TREE_OPERAND (arg0
, 1))
8759 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
8761 tem
= fold_cond_expr_with_comparison (type
, arg0
,
8762 TREE_OPERAND (t
, 1),
8763 TREE_OPERAND (t
, 2));
8768 if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
8769 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
8770 TREE_OPERAND (t
, 2),
8771 TREE_OPERAND (arg0
, 1))
8772 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 2)))))
8774 tem
= invert_truthvalue (arg0
);
8775 if (TREE_CODE_CLASS (TREE_CODE (tem
)) == '<')
8777 tem
= fold_cond_expr_with_comparison (type
, tem
,
8778 TREE_OPERAND (t
, 2),
8779 TREE_OPERAND (t
, 1));
8785 /* If the second operand is simpler than the third, swap them
8786 since that produces better jump optimization results. */
8787 if (tree_swap_operands_p (TREE_OPERAND (t
, 1),
8788 TREE_OPERAND (t
, 2), false))
8790 /* See if this can be inverted. If it can't, possibly because
8791 it was a floating-point inequality comparison, don't do
8793 tem
= invert_truthvalue (arg0
);
8795 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
8796 return fold (build3 (code
, type
, tem
,
8797 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1)));
8800 /* Convert A ? 1 : 0 to simply A. */
8801 if (integer_onep (TREE_OPERAND (t
, 1))
8802 && integer_zerop (TREE_OPERAND (t
, 2))
8803 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8804 call to fold will try to move the conversion inside
8805 a COND, which will recurse. In that case, the COND_EXPR
8806 is probably the best choice, so leave it alone. */
8807 && type
== TREE_TYPE (arg0
))
8808 return pedantic_non_lvalue (arg0
);
8810 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8811 over COND_EXPR in cases such as floating point comparisons. */
8812 if (integer_zerop (TREE_OPERAND (t
, 1))
8813 && integer_onep (TREE_OPERAND (t
, 2))
8814 && truth_value_p (TREE_CODE (arg0
)))
8815 return pedantic_non_lvalue (fold_convert (type
,
8816 invert_truthvalue (arg0
)));
8818 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8819 if (TREE_CODE (arg0
) == LT_EXPR
8820 && integer_zerop (TREE_OPERAND (arg0
, 1))
8821 && integer_zerop (TREE_OPERAND (t
, 2))
8822 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
8823 return fold_convert (type
, fold (build2 (BIT_AND_EXPR
,
8824 TREE_TYPE (tem
), tem
, arg1
)));
8826 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8827 already handled above. */
8828 if (TREE_CODE (arg0
) == BIT_AND_EXPR
8829 && integer_onep (TREE_OPERAND (arg0
, 1))
8830 && integer_zerop (TREE_OPERAND (t
, 2))
8831 && integer_pow2p (arg1
))
8833 tree tem
= TREE_OPERAND (arg0
, 0);
8835 if (TREE_CODE (tem
) == RSHIFT_EXPR
8836 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
8837 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
8838 return fold (build2 (BIT_AND_EXPR
, type
,
8839 TREE_OPERAND (tem
, 0), arg1
));
8842 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8843 is probably obsolete because the first operand should be a
8844 truth value (that's why we have the two cases above), but let's
8845 leave it in until we can confirm this for all front-ends. */
8846 if (integer_zerop (TREE_OPERAND (t
, 2))
8847 && TREE_CODE (arg0
) == NE_EXPR
8848 && integer_zerop (TREE_OPERAND (arg0
, 1))
8849 && integer_pow2p (arg1
)
8850 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
8851 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
8852 arg1
, OEP_ONLY_CONST
))
8853 return pedantic_non_lvalue (fold_convert (type
,
8854 TREE_OPERAND (arg0
, 0)));
8856 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8857 if (integer_zerop (TREE_OPERAND (t
, 2))
8858 && truth_value_p (TREE_CODE (arg0
))
8859 && truth_value_p (TREE_CODE (arg1
)))
8860 return fold (build2 (TRUTH_ANDIF_EXPR
, type
, arg0
, arg1
));
8862 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8863 if (integer_onep (TREE_OPERAND (t
, 2))
8864 && truth_value_p (TREE_CODE (arg0
))
8865 && truth_value_p (TREE_CODE (arg1
)))
8867 /* Only perform transformation if ARG0 is easily inverted. */
8868 tem
= invert_truthvalue (arg0
);
8869 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
8870 return fold (build2 (TRUTH_ORIF_EXPR
, type
, tem
, arg1
));
8873 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8874 if (integer_zerop (arg1
)
8875 && truth_value_p (TREE_CODE (arg0
))
8876 && truth_value_p (TREE_CODE (TREE_OPERAND (t
, 2))))
8878 /* Only perform transformation if ARG0 is easily inverted. */
8879 tem
= invert_truthvalue (arg0
);
8880 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
8881 return fold (build2 (TRUTH_ANDIF_EXPR
, type
, tem
,
8882 TREE_OPERAND (t
, 2)));
8885 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8886 if (integer_onep (arg1
)
8887 && truth_value_p (TREE_CODE (arg0
))
8888 && truth_value_p (TREE_CODE (TREE_OPERAND (t
, 2))))
8889 return fold (build2 (TRUTH_ORIF_EXPR
, type
, arg0
,
8890 TREE_OPERAND (t
, 2)));
8895 /* When pedantic, a compound expression can be neither an lvalue
8896 nor an integer constant expression. */
8897 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
8899 /* Don't let (0, 0) be null pointer constant. */
8900 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
8901 : fold_convert (type
, arg1
);
8902 return pedantic_non_lvalue (tem
);
8906 return build_complex (type
, arg0
, arg1
);
8910 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8912 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8913 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
8914 TREE_OPERAND (arg0
, 1));
8915 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
8916 return TREE_REALPART (arg0
);
8917 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8918 return fold (build2 (TREE_CODE (arg0
), type
,
8919 fold (build1 (REALPART_EXPR
, type
,
8920 TREE_OPERAND (arg0
, 0))),
8921 fold (build1 (REALPART_EXPR
, type
,
8922 TREE_OPERAND (arg0
, 1)))));
8926 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8927 return fold_convert (type
, integer_zero_node
);
8928 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8929 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
8930 TREE_OPERAND (arg0
, 0));
8931 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
8932 return TREE_IMAGPART (arg0
);
8933 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8934 return fold (build2 (TREE_CODE (arg0
), type
,
8935 fold (build1 (IMAGPART_EXPR
, type
,
8936 TREE_OPERAND (arg0
, 0))),
8937 fold (build1 (IMAGPART_EXPR
, type
,
8938 TREE_OPERAND (arg0
, 1)))));
8941 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8943 case CLEANUP_POINT_EXPR
:
8944 if (! has_cleanups (arg0
))
8945 return TREE_OPERAND (t
, 0);
8948 enum tree_code code0
= TREE_CODE (arg0
);
8949 int kind0
= TREE_CODE_CLASS (code0
);
8950 tree arg00
= TREE_OPERAND (arg0
, 0);
8953 if (kind0
== '1' || code0
== TRUTH_NOT_EXPR
)
8954 return fold (build1 (code0
, type
,
8955 fold (build1 (CLEANUP_POINT_EXPR
,
8956 TREE_TYPE (arg00
), arg00
))));
8958 if (kind0
== '<' || kind0
== '2'
8959 || code0
== TRUTH_ANDIF_EXPR
|| code0
== TRUTH_ORIF_EXPR
8960 || code0
== TRUTH_AND_EXPR
|| code0
== TRUTH_OR_EXPR
8961 || code0
== TRUTH_XOR_EXPR
)
8963 arg01
= TREE_OPERAND (arg0
, 1);
8965 if (TREE_CONSTANT (arg00
)
8966 || ((code0
== TRUTH_ANDIF_EXPR
|| code0
== TRUTH_ORIF_EXPR
)
8967 && ! has_cleanups (arg00
)))
8968 return fold (build2 (code0
, type
, arg00
,
8969 fold (build1 (CLEANUP_POINT_EXPR
,
8970 TREE_TYPE (arg01
), arg01
))));
8972 if (TREE_CONSTANT (arg01
))
8973 return fold (build2 (code0
, type
,
8974 fold (build1 (CLEANUP_POINT_EXPR
,
8975 TREE_TYPE (arg00
), arg00
)),
8983 /* Check for a built-in function. */
8984 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
8985 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
8987 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
8989 tree tmp
= fold_builtin (t
, false);
8997 } /* switch (code) */
9000 #ifdef ENABLE_FOLD_CHECKING
9003 static void fold_checksum_tree (tree
, struct md5_ctx
*, htab_t
);
9004 static void fold_check_failed (tree
, tree
);
9005 void print_fold_checksum (tree
);
9007 /* When --enable-checking=fold, compute a digest of expr before
9008 and after actual fold call to see if fold did not accidentally
9009 change original expr. */
9016 unsigned char checksum_before
[16], checksum_after
[16];
9019 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
9020 md5_init_ctx (&ctx
);
9021 fold_checksum_tree (expr
, &ctx
, ht
);
9022 md5_finish_ctx (&ctx
, checksum_before
);
9025 ret
= fold_1 (expr
);
9027 md5_init_ctx (&ctx
);
9028 fold_checksum_tree (expr
, &ctx
, ht
);
9029 md5_finish_ctx (&ctx
, checksum_after
);
9032 if (memcmp (checksum_before
, checksum_after
, 16))
9033 fold_check_failed (expr
, ret
);
9039 print_fold_checksum (tree expr
)
9042 unsigned char checksum
[16], cnt
;
9045 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
9046 md5_init_ctx (&ctx
);
9047 fold_checksum_tree (expr
, &ctx
, ht
);
9048 md5_finish_ctx (&ctx
, checksum
);
9050 for (cnt
= 0; cnt
< 16; ++cnt
)
9051 fprintf (stderr
, "%02x", checksum
[cnt
]);
9052 putc ('\n', stderr
);
9056 fold_check_failed (tree expr ATTRIBUTE_UNUSED
, tree ret ATTRIBUTE_UNUSED
)
9058 internal_error ("fold check: original tree changed by fold");
9062 fold_checksum_tree (tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
9065 enum tree_code code
;
9066 char buf
[sizeof (struct tree_decl
)];
9069 if (sizeof (struct tree_exp
) + 5 * sizeof (tree
)
9070 > sizeof (struct tree_decl
)
9071 || sizeof (struct tree_type
) > sizeof (struct tree_decl
))
9075 slot
= htab_find_slot (ht
, expr
, INSERT
);
9079 code
= TREE_CODE (expr
);
9080 if (TREE_CODE_CLASS (code
) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr
))
9082 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9083 memcpy (buf
, expr
, tree_size (expr
));
9085 SET_DECL_ASSEMBLER_NAME (expr
, NULL
);
9087 else if (TREE_CODE_CLASS (code
) == 't'
9088 && (TYPE_POINTER_TO (expr
) || TYPE_REFERENCE_TO (expr
)))
9090 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9091 memcpy (buf
, expr
, tree_size (expr
));
9093 TYPE_POINTER_TO (expr
) = NULL
;
9094 TYPE_REFERENCE_TO (expr
) = NULL
;
9096 md5_process_bytes (expr
, tree_size (expr
), ctx
);
9097 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
9098 if (TREE_CODE_CLASS (code
) != 't' && TREE_CODE_CLASS (code
) != 'd')
9099 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
9100 switch (TREE_CODE_CLASS (code
))
9106 md5_process_bytes (TREE_STRING_POINTER (expr
),
9107 TREE_STRING_LENGTH (expr
), ctx
);
9110 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
9111 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
9114 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
9124 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
9125 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
9128 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
9129 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
9141 len
= first_rtl_op (code
);
9142 for (i
= 0; i
< len
; ++i
)
9143 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
9146 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
9147 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
9148 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
9149 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
9150 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
9151 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
9152 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
9153 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
9154 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
9155 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
9156 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
9159 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
9160 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
9161 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
9162 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
9163 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
9164 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
9165 if (INTEGRAL_TYPE_P (expr
)
9166 || SCALAR_FLOAT_TYPE_P (expr
))
9168 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
9169 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
9171 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
9172 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
9173 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
9182 /* Perform constant folding and related simplification of initializer
9183 expression EXPR. This behaves identically to "fold" but ignores
9184 potential run-time traps and exceptions that fold must preserve. */
9187 fold_initializer (tree expr
)
9189 int saved_signaling_nans
= flag_signaling_nans
;
9190 int saved_trapping_math
= flag_trapping_math
;
9191 int saved_trapv
= flag_trapv
;
9194 flag_signaling_nans
= 0;
9195 flag_trapping_math
= 0;
9198 result
= fold (expr
);
9200 flag_signaling_nans
= saved_signaling_nans
;
9201 flag_trapping_math
= saved_trapping_math
;
9202 flag_trapv
= saved_trapv
;
9207 /* Determine if first argument is a multiple of second argument. Return 0 if
9208 it is not, or we cannot easily determined it to be.
9210 An example of the sort of thing we care about (at this point; this routine
9211 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9212 fold cases do now) is discovering that
9214 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9220 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9222 This code also handles discovering that
9224 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9226 is a multiple of 8 so we don't have to worry about dealing with a
9229 Note that we *look* inside a SAVE_EXPR only to determine how it was
9230 calculated; it is not safe for fold to do much of anything else with the
9231 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9232 at run time. For example, the latter example above *cannot* be implemented
9233 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9234 evaluation time of the original SAVE_EXPR is not necessarily the same at
9235 the time the new expression is evaluated. The only optimization of this
9236 sort that would be valid is changing
9238 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9242 SAVE_EXPR (I) * SAVE_EXPR (J)
9244 (where the same SAVE_EXPR (J) is used in the original and the
9245 transformed version). */
9248 multiple_of_p (tree type
, tree top
, tree bottom
)
9250 if (operand_equal_p (top
, bottom
, 0))
9253 if (TREE_CODE (type
) != INTEGER_TYPE
)
9256 switch (TREE_CODE (top
))
9259 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
9260 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
9264 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
9265 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
9268 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
9272 op1
= TREE_OPERAND (top
, 1);
9273 /* const_binop may not detect overflow correctly,
9274 so check for it explicitly here. */
9275 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
9276 > TREE_INT_CST_LOW (op1
)
9277 && TREE_INT_CST_HIGH (op1
) == 0
9278 && 0 != (t1
= fold_convert (type
,
9279 const_binop (LSHIFT_EXPR
,
9282 && ! TREE_OVERFLOW (t1
))
9283 return multiple_of_p (type
, t1
, bottom
);
9288 /* Can't handle conversions from non-integral or wider integral type. */
9289 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
9290 || (TYPE_PRECISION (type
)
9291 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
9294 /* .. fall through ... */
9297 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
9300 if (TREE_CODE (bottom
) != INTEGER_CST
9301 || (TYPE_UNSIGNED (type
)
9302 && (tree_int_cst_sgn (top
) < 0
9303 || tree_int_cst_sgn (bottom
) < 0)))
9305 return integer_zerop (const_binop (TRUNC_MOD_EXPR
,
9313 /* Return true if `t' is known to be non-negative. */
9316 tree_expr_nonnegative_p (tree t
)
9318 switch (TREE_CODE (t
))
9324 return tree_int_cst_sgn (t
) >= 0;
9327 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
9330 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
9331 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9332 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9334 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9335 both unsigned and at least 2 bits shorter than the result. */
9336 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
9337 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
9338 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
9340 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
9341 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
9342 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
9343 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
9345 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
9346 TYPE_PRECISION (inner2
)) + 1;
9347 return prec
< TYPE_PRECISION (TREE_TYPE (t
));
9353 if (FLOAT_TYPE_P (TREE_TYPE (t
)))
9355 /* x * x for floating point x is always non-negative. */
9356 if (operand_equal_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1), 0))
9358 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9359 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9362 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9363 both unsigned and their total bits is shorter than the result. */
9364 if (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
9365 && TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
9366 && TREE_CODE (TREE_OPERAND (t
, 1)) == NOP_EXPR
)
9368 tree inner1
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
9369 tree inner2
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0));
9370 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
9371 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
9372 return TYPE_PRECISION (inner1
) + TYPE_PRECISION (inner2
)
9373 < TYPE_PRECISION (TREE_TYPE (t
));
9377 case TRUNC_DIV_EXPR
:
9379 case FLOOR_DIV_EXPR
:
9380 case ROUND_DIV_EXPR
:
9381 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9382 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9384 case TRUNC_MOD_EXPR
:
9386 case FLOOR_MOD_EXPR
:
9387 case ROUND_MOD_EXPR
:
9388 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9391 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9392 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9395 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1))
9396 || tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9399 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9400 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9404 tree inner_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
9405 tree outer_type
= TREE_TYPE (t
);
9407 if (TREE_CODE (outer_type
) == REAL_TYPE
)
9409 if (TREE_CODE (inner_type
) == REAL_TYPE
)
9410 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9411 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
9413 if (TYPE_UNSIGNED (inner_type
))
9415 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9418 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
9420 if (TREE_CODE (inner_type
) == REAL_TYPE
)
9421 return tree_expr_nonnegative_p (TREE_OPERAND (t
,0));
9422 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
9423 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
9424 && TYPE_UNSIGNED (inner_type
);
9430 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1))
9431 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 2));
9433 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9435 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9436 && tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9438 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0))
9439 || tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9441 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
9443 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t
, 1)));
9445 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9446 case NON_LVALUE_EXPR
:
9447 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9449 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
9453 tree temp
= TARGET_EXPR_SLOT (t
);
9454 t
= TARGET_EXPR_INITIAL (t
);
9456 /* If the initializer is non-void, then it's a normal expression
9457 that will be assigned to the slot. */
9458 if (!VOID_TYPE_P (t
))
9459 return tree_expr_nonnegative_p (t
);
9461 /* Otherwise, the initializer sets the slot in some way. One common
9462 way is an assignment statement at the end of the initializer. */
9465 if (TREE_CODE (t
) == BIND_EXPR
)
9466 t
= expr_last (BIND_EXPR_BODY (t
));
9467 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
9468 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
9469 t
= expr_last (TREE_OPERAND (t
, 0));
9470 else if (TREE_CODE (t
) == STATEMENT_LIST
)
9475 if (TREE_CODE (t
) == MODIFY_EXPR
9476 && TREE_OPERAND (t
, 0) == temp
)
9477 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 1));
    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_SQRT)
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
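/* An illustrative sketch (not from the original source): given a tree
   EXPR for "(int) c", where C is a variable of type unsigned char, the
   NOP_EXPR case above sees a widening conversion from an unsigned inner
   type, so

     tree_expr_nonnegative_p (expr)

   returns 1, and a caller may, for example, fold ABS_EXPR <expr> to
   plain EXPR.  */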
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* With the presence of negative values it is hard
             to say something.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
    case CONVERT_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        /* A non-narrowing conversion preserves nonzeroness.  */
        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      /* Weak declarations may link to NULL.  */
      if (DECL_P (TREE_OPERAND (t, 0)))
        return !DECL_WEAK (TREE_OPERAND (t, 0));
      /* Constants and all other cases are never weak.  */
      return true;

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}
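/* Illustrative sketch (hypothetical caller, not part of this file):
   for a tree ADDR representing "&var", where VAR is an ordinary
   (non-weak) declaration, the ADDR_EXPR case above returns true, so a
   comparison such as "&var != 0" can be folded to a constant:

     if (tree_expr_nonzero_p (addr))
       return constant_boolean_node (true, type);  */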
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}
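/* Sketch (assumed values, not from the original source): for the RTL
   constant (const_int 42) this returns 1; for (const_int -1) it
   returns 0, as it does for anything it cannot analyze, e.g. a REG.  */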
/* See if we are applying CODE, a relational operator, to the highest or
   lowest possible integer of TYPE.  If so, then the result is a compile
   time constant and we can return that.  Otherwise return NULL_TREE.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
                       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
          || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        {
          max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
          min = 0;
        }
      else
        {
          max = signed_max;
          min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
        }

      if (TREE_INT_CST_HIGH (op1) == 0
          && TREE_INT_CST_LOW (op1) == max)
        switch (code)
          {
          /* X > MAX is always false; X <= MAX is always true.  */
          case GT_EXPR:
            return omit_one_operand (type, integer_zero_node, op0);

          case GE_EXPR:
            *code_p = EQ_EXPR;
            break;

          case LE_EXPR:
            return omit_one_operand (type, integer_one_node, op0);

          case LT_EXPR:
            *code_p = NE_EXPR;
            break;

          /* The GE_EXPR and LT_EXPR cases above are not normally
             reached because of previous transformations.  */

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == 0
               && TREE_INT_CST_LOW (op1) == max - 1)
        switch (code)
          {
          /* X > MAX-1 is the same as X == MAX; X <= MAX-1 as X != MAX.  */
          case GT_EXPR:
            *code_p = EQ_EXPR;
            *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
            break;

          case LE_EXPR:
            *code_p = NE_EXPR;
            *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
            break;

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
               && TREE_INT_CST_LOW (op1) == min)
        switch (code)
          {
          /* X < MIN is always false; X >= MIN is always true.  */
          case LT_EXPR:
            return omit_one_operand (type, integer_zero_node, op0);

          case LE_EXPR:
            *code_p = EQ_EXPR;
            break;

          case GE_EXPR:
            return omit_one_operand (type, integer_one_node, op0);

          case GT_EXPR:
            *code_p = NE_EXPR;
            break;

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
               && TREE_INT_CST_LOW (op1) == min + 1)
        switch (code)
          {
          /* X >= MIN+1 is the same as X != MIN; X < MIN+1 as X == MIN.  */
          case GE_EXPR:
            *code_p = NE_EXPR;
            *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
            break;

          case LT_EXPR:
            *code_p = EQ_EXPR;
            *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
            break;

          default:
            break;
          }
      else if (TREE_INT_CST_HIGH (op1) == 0
               && TREE_INT_CST_LOW (op1) == signed_max
               && TYPE_UNSIGNED (TREE_TYPE (op1))
               /* signed_type does not work on pointer types.  */
               && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        {
          /* The following case also applies to X < signed_max+1
             and X >= signed_max+1 because of previous transformations.  */
          if (code == LE_EXPR || code == GT_EXPR)
            {
              tree st0, st1, exp, retval;

              st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
              st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

              exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                            type,
                            fold_convert (st0, op0),
                            fold_convert (st1, integer_zero_node));

              retval
                = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
                                                          TREE_TYPE (exp),
                                                          TREE_OPERAND (exp, 0),
                                                          TREE_OPERAND (exp, 1));

              /* If we are in gimple form, then returning EXP would create
                 non-gimple expressions.  Clearing it is safe and ensures
                 we do not allow a non-gimple expression to escape.  */
              if (in_gimple_form)
                exp = NULL;

              return (retval ? retval : exp);
            }
        }
    }

  return NULL_TREE;
}
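/* Worked example (a sketch; X is a hypothetical expression of type
   unsigned char, so MAX is 255): for the comparison "x > 255" the
   first switch above folds the whole expression to constant zero,
   while "x >= 255" is rewritten in place to "x == 255" by setting
   *CODE_P to EQ_EXPR.  */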
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying
   TYPE, OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when at least one operand is a constant.
   As a result of those simplifying assumptions this routine is far
   simpler than the generic fold routine.  */
tree
nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
                                        tree op0, tree op1)
{
  int wins = 1;
  tree subop0;
  tree subop1;
  tree tem;

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  /* If either operand is a complex type, extract its real component.  */
  if (TREE_CODE (op0) == COMPLEX_CST)
    subop0 = TREE_REALPART (op0);
  else
    subop0 = op0;

  if (TREE_CODE (op1) == COMPLEX_CST)
    subop1 = TREE_REALPART (op1);
  else
    subop1 = op1;

  /* Note if either argument is not a real or integer constant.
     With a few exceptions, simplification is limited to cases
     where both arguments are constants.  */
  if ((TREE_CODE (subop0) != INTEGER_CST
       && TREE_CODE (subop0) != REAL_CST)
      || (TREE_CODE (subop1) != INTEGER_CST
          && TREE_CODE (subop1) != REAL_CST))
    wins = 0;

  switch (code)
    {
    case PLUS_EXPR:
      /* (plus (address) (const_int)) is a constant.  */
      if (TREE_CODE (op0) == PLUS_EXPR
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
              || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
                  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
                      == ADDR_EXPR)))
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
                       const_binop (PLUS_EXPR, op1,
                                    TREE_OPERAND (op0, 1), 0));
    case BIT_XOR_EXPR:

    binary:
      if (!wins)
        return NULL_TREE;

      /* Both arguments are constants.  Simplify.  */
      tem = const_binop (code, op0, op1, 0);
      if (tem != NULL_TREE)
        {
          /* The return value should always have the same type as
             the original expression.  */
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);

          return tem;
        }
      return NULL_TREE;
    case MINUS_EXPR:
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an
         operand is volatile.  */
      if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
        return fold_convert (type, integer_zero_node);

      goto binary;

    case MULT_EXPR:
    case BIT_AND_EXPR:
      /* Special case multiplication or bitwise AND where one argument
         is zero.  */
      if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      else
        if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
            && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
            && real_zerop (op1))
          return omit_one_operand (type, op1, op0);

      goto binary;

    case BIT_IOR_EXPR:
      /* Special case when we know the result will be all ones.  */
      if (integer_all_onesp (op1))
        return omit_one_operand (type, op1, op0);

      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by zero is undefined.  */
      if (integer_zerop (op1))
        return NULL_TREE;

      if (TREE_CODE (op1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
          && real_zerop (op1))
        return NULL_TREE;

      goto binary;

    case MIN_EXPR:
      /* MIN with the minimum value of the type is that value.  */
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, op1, op0);

      goto binary;

    case MAX_EXPR:
      /* Likewise, MAX with the maximum value of the type.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, op1, op0);

      goto binary;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
        return omit_one_operand (type, op0, op1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);

      /* Since negative shift count is not well-defined, don't
         try to compute it in the compiler.  */
      if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
        return NULL_TREE;

      goto binary;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* -1 rotated either direction by any amount is still -1.  */
      if (integer_all_onesp (op0))
        return omit_one_operand (type, op0, op1);

      /* 0 rotated either direction by any amount is still zero.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);

      goto binary;

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, op0, op1);
      return NULL_TREE;
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (op0) == INTEGER_CST
           && TREE_CODE (op1) != INTEGER_CST)
          || (TREE_CODE (op0) == REAL_CST
              && TREE_CODE (op1) != REAL_CST))
        {
          tem = op0;
          op0 = op1;
          op1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (op0) != INTEGER_CST
          && tree_int_cst_sgn (op1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              code = GT_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            default:
              break;
            }
        }

      tem = fold_relational_hi_lo (&code, type, &op0, &op1);
      if (tem)
        return tem;

      /* Fall through.  */

    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (!wins)
        return NULL_TREE;

      return fold_relational_const (code, type, op0, op1);
    case RANGE_EXPR:
      /* This could probably be handled.  */
      return NULL_TREE;

    case TRUTH_AND_EXPR:
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (true, type);
      return NULL_TREE;

    case TRUTH_OR_EXPR:
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (false, type);
      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        {
          int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
          return constant_boolean_node (x, type);
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
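/* Usage sketch (hypothetical operands, not part of this file): with
   two INTEGER_CST operands, PLUS_EXPR reaches the "binary" path above
   and folds through const_binop, e.g.

     tree sum = nondestructive_fold_binary_to_constant
                  (PLUS_EXPR, integer_type_node,
                   integer_one_node, integer_one_node);

   returns a fresh constant 2 and leaves both operands untouched, while
   a non-constant operand would make the routine return NULL_TREE.  */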
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */

tree
nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
                                       tree op0)
{
  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      if (TREE_CODE (op0) == COMPLEX_CST)
        subop = TREE_REALPART (op0);
      else
        subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
        return NULL_TREE;
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      return fold_convert_const (code, type, op0);

    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_negate_const (op0, type);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_abs_const (op0, type);
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST)
        return fold_not_const (op0, type);
      return NULL_TREE;

    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_REALPART (op0);
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_IMAGPART (op0);
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST
          && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
        return build_complex (type, TREE_REALPART (op0),
                              negate_expr (TREE_IMAGPART (op0)));
      return NULL_TREE;

    default:
      return NULL_TREE;
    }
}
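/* Usage sketch (illustrative only): negating a constant through this
   routine leaves the operand itself intact,

     tree neg = nondestructive_fold_unary_to_constant
                  (NEGATE_EXPR, integer_type_node, integer_one_node);

   which returns a fresh INTEGER_CST for -1 via fold_negate_const.  */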
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_2 ((TREE_STRING_POINTER (string)
                                          [TREE_INT_CST_LOW (index)]), 0));
    }
  return NULL;
}
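/* Example (a sketch): for the C source

     static const char s[] = "abc";
     ... s[1] ...

   the ARRAY_REF branch above recognizes the constant string and the
   constant in-bounds index, so the load folds to the character
   constant 'b'.  */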
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                 TREE_INT_CST_HIGH (arg0),
                                 &low, &high);
      t = build_int_2 (low, high);
      TREE_TYPE (t) = type;
      TREE_OVERFLOW (t)
        = (TREE_OVERFLOW (arg0)
           | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
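/* Sketch (assumed 32-bit int): negating the INTEGER_CST for INT_MIN
   yields +2147483648, which does not fit the signed type; force_fit_type
   detects this, and the condition is recorded in TREE_OVERFLOW rather
   than being silently dropped.  */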
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        return arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        return arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg0)
               | force_fit_type (t, overflow));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
          return t;
        }
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    {
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        return arg0;
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
                       ~ TREE_INT_CST_HIGH (arg0));
      TREE_TYPE (t) = type;
      force_fit_type (t, 0);
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
      TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              abort ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
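/* Worked example (a sketch; THREE and FIVE are hypothetical
   INTEGER_CSTs): to fold "3 >= 5" the code above first rewrites GE to
   LT with INVERT set, computes INT_CST_LT (3, 5) = 1, then flips it,
   so fold_relational_const (GE_EXPR, boolean_type_node, three, five)
   returns boolean false.  */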
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  if (TREE_CODE (t) == INDIRECT_REF)
    {
      /* &*p is just P, possibly converted to the desired pointer type.  */
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      /* Mark the innermost base object as addressable.  */
      while (handled_component_p (base)
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR)
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}
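/* Usage sketch (hypothetical trees): if PTR is "&x", with X of the
   pointed-to type, then build_fold_indirect_ref (ptr) returns the tree
   for X itself rather than wrapping an INDIRECT_REF around the
   ADDR_EXPR, which keeps gimplification from ever seeing "*&x".  */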
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case '1':
        t = TREE_OPERAND (t, 0);
        break;

      case '2':
      case '<':
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case 'e':
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
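/* Example (a sketch): given the ignored expression "f (), x + 1", the
   COMPOUND_EXPR case above drops the side-effect-free second operand
   and returns just the call "f ()"; a tree with no side effects at all
   is replaced outright by integer_zero_node.  */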
#include "gt-fold-const.h"