/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "coretypes.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
                    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
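
/* Illustrative note: the encoding packs the three primitive outcomes
   into bits -- LT = 1, EQ = 2, GT = 4 -- so the composite codes are
   bitwise ORs (LE = LT|EQ, NE = LT|GT, GE = EQ|GT).  ANDing or ORing
   two compcodes therefore intersects or unions the outcome sets,
   e.g. COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ.  */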
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
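
/* Worked example of OVERFLOW_SUM_SIGN (illustrative, using 8-bit
   values for brevity): a = 0x70 and b = 0x20 agree in sign, so
   ~(a ^ b) has the sign bit set; their wrapped sum 0x90 differs from
   a in sign, so (a ^ sum) has it set too, the AND is negative, and
   the macro reports overflow.  */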
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
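
/* Illustrative example (assuming HOST_BITS_PER_WIDE_INT == 32, so
   BASE == 1 << 16): LOWPART (0x12345678) == 0x5678 and
   HIGHPART (0x12345678) == 0x1234, and indeed
   0x5678 + 0x1234 * BASE == 0x12345678.  */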
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
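
/* Illustrative round trip (assuming a 32-bit HOST_WIDE_INT):
   encode (w, 0xdeadbeef, 0x12345678) yields
   w == { 0xbeef, 0xdead, 0x5678, 0x1234 }, and decode (w, &lo, &hi)
   restores lo == 0xdeadbeef and hi == 0x12345678.  */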
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
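
/* Illustrative example: forcing 0x1ff into a signed 8-bit type keeps
   only the low bits 0xff; since bit 7 is then set, the value is
   sign-extended to -1, and the function reports overflow because the
   stored value no longer matches the original.  */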
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
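
/* Note (illustrative): the carry out of the low word is recovered as
   (l < l1) -- with unsigned wraparound, l1 + l2 overflowed exactly
   when the sum is smaller than an addend.  */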
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
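
/* Note (illustrative): the only signed negation that overflows is the
   most negative doubleword value, whose negation is itself; the test
   (*hv & h1) < 0 fires exactly when the high word keeps its sign bit
   through the negation.  */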
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
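
/* Note (illustrative): a signed doubleword product fits only if the
   full upper half is all zero bits (nonnegative result) or all one
   bits (negative result); any other pattern means the true product
   needed more than two words, which is what the final test checks.  */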
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
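
/* Note (illustrative): the split shifts above, such as
   x << (HOST_BITS_PER_WIDE_INT - count - 1) << 1, avoid ever shifting
   by a full word width, which C leaves undefined, while still giving
   the intended result when count == 0.  */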
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1; */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
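
/* Note (illustrative): scaling the divisor so its leading digit is at
   least BASE/2 is what makes Knuth's estimate work / den[den_hi_sig]
   at most 2 too high; the refinement step and the add-back step then
   each remove at most one unit of excess.  */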
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */
static bool
negate_mathfn_p (enum built_in_function code)
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);

        if (! TREE_OVERFLOW (tem)
            || TREE_UNSIGNED (type)
            || ! flag_trapv)
          return tem;
        break;
      }

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         TREE_OPERAND (t, 0),
                                         negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         negate_expr (tem),
                                         TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
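
/* Illustrative example: negate_expr on the tree for (a - b) returns
   the tree for (b - a) instead of wrapping a NEGATE_EXPR, provided the
   type is not floating-point or -funsafe-math-optimizations permits
   the reordering.  */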
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
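
/* Illustrative example: splitting IN = x + 3 with CODE == PLUS_EXPR
   stores the literal 3 in *LITP, leaves *CONP null, and returns x as
   the variable part; splitting x - 3 routes the literal to
   *MINUS_LITP instead.  */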
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     the values.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);

  t = build_int_2 (low, hi);
  TREE_TYPE (t) = TREE_TYPE (arg1);

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
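
/* Illustrative example: int_const_binop (PLUS_EXPR, a, b, 0) on two
   INTEGER_CSTs holding 5 and 7 yields an INTEGER_CST holding 12 in
   the type of the first operand, with the overflow flags of both
   inputs propagated into the result.  */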
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
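
/* Note (illustrative): the RDIV_EXPR case above implements the usual
   complex quotient (a+bi)/(c+di) == ((ac+bd) + (bc-ad)i) / (c*c+d*d),
   with MAGSQUARED holding the shared denominator.  */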
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = t;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (tree t, tree arg1)
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return t;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
          /* If x is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (x))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
              if (! REAL_VALUES_LESS (l, x))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
                  if (! REAL_VALUES_LESS (x, u))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, x);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
        }
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
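
/* Illustrative example of the saturating conversion above: folding
   (int) 1.0e30 with a 32-bit int target yields INT_MAX with the
   overflow flag set, and (int) of a NaN folds to zero, also flagged
   as overflowing.  */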
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.

   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence a zero value for
   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
   If comparing arbitrary expression trees, such as from different
   statements, ONLY_CONST must usually be nonzero.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  tree fndecl;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (v1, v2, only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    case 'e':
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case RTL_EXPR:
          return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg1, 0), 0))
            return 0;

          /* Only consider const functions equivalent.  */
          fndecl = get_callee_fndecl (arg0);
          if (fndecl == NULL_TREE
              || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
            return 0;

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return TREE_CODE (arg0) == FUNCTION_DECL
             && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
             && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
             && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);

    default:
      return 0;
    }
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
                          (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)

  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)

  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))

      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)

      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));

      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))

	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))

      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))

      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)

  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)

  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))

      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

      return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

      return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 2),
				      old0, new0, old1, new1)));

      /* Fall through - ???  */

	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))

	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))

	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))

	return fold (build (code, type, arg0, arg1));
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */
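
/* For instance, folding "f () * 0" yields 0, but f () must still be
   called for its side effects, so we would build roughly
	(f (), 0)
   i.e. a COMPOUND_EXPR evaluating OMITTED and yielding the converted
   RESULT.  */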
omit_one_operand (tree type, tree result, tree omitted)

  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

pedantic_omit_one_operand (tree type, tree result, tree omitted)

  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */
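
/* For example, the negation of "a < b" becomes "a >= b" (except for
   floating point, where a NaN makes that inversion invalid), and by
   De Morgan the negation of "a && b" becomes "!a || !b".  */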
invert_truthvalue (tree arg)

  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')

      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));

      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));

      return build (TRUTH_XOR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

      if (!integer_onep (TREE_OPERAND (arg, 1)))

      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)

  return build1 (TRUTH_NOT_EXPR, type, arg);
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */
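
/* For instance, (x | 3) & (x | 5) becomes x | (3 & 5), which then folds
   further to x | 1 -- one IOR instead of two IORs and an AND.  */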
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))

      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);

  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))

      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);

  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))

      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);

  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))

      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);

  return fold (build (TREE_CODE (arg0), type, common,
		      fold (build (code, type, left, right))));
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
		    int unsignedp)

  tree result = build (BIT_FIELD_REF, type, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
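
/* Concretely, for something like

	struct { unsigned f : 3; } s;
	... s.f == 5 ...

   instead of extracting F (load, shift, mask) and comparing with 5, we
   can load the containing chunk B once and test (B & MASK) == (5 << POS),
   where MASK and POS describe F's bits within B.  */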
optimize_bit_field_compare (enum tree_code code, tree compare_type,

  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)

      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);

  if (nbitsize == lbitsize)

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  /* If not comparing with constant, just rework the comparison
     and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))

	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));

      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))

	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))

      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);

      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
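
/* For instance, given a test like "(s.f & 0x0f) == 3", this is called
   on the tree for "s.f & 0x0f": the BIT_AND_EXPR is stripped, 0x0f is
   recorded in *PAND_MASK, and the underlying reference to s.f is
   returned together with its bit position, size and mask.  */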
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)

  tree outer_type = 0;

  tree mask, inner, offset;

  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)

      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TREE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */

    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pand_mask = and_mask;
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

all_ones_mask_p (tree mask, int size)

  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
  force_fit_type (tmask, 0);

    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),

				     size_int (precision - size), 0));
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */
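
/* For example, if EXP has type "signed char" the sign bit is the
   constant 0x80 (-128); for a 32-bit int it is 0x80000000.  A match
   lets fold rewrite tests such as "(x & 0x80000000) != 0" into the
   simpler sign test "x < 0".  */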
sign_bit_p (tree exp, tree val)

  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)

      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));

      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

simple_operand_p (tree exp)

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
	  || TREE_CODE (exp) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (exp))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'

	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
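
/* As a worked case: X == 2 || X == 3 || X == 4 || X == 5 is "+ [2, 5]".
   Subtracting the low bound gives X - 2 in [0, 3]; done in an unsigned
   type, values of X below 2 wrap around to huge numbers, so the single
   comparison (unsigned) (X - 2) <= 3 tests the whole range.  */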
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)

      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, convert (TREE_TYPE (arg0), arg1)));

      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;

  if (TREE_CODE_CLASS (code) != '<')

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

      result = sgn0 == sgn1;

      result = sgn0 != sgn1;

      result = sgn0 < sgn1;

      result = sgn0 <= sgn1;

      result = sgn0 > sgn1;

      result = sgn0 >= sgn1;

  return convert (type, result ? integer_one_node : integer_zero_node);
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */
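
/* For instance, for EXP = "x > 5" this returns x with *PIN_P = 0,
   *PLOW omitted and *PHIGH = 5; that is the "- [-, 5]" form described
   above: x lies outside the range from the lowest value to 5.  */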
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)

  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;

  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))

	  if (first_rtl_op (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)

	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)

	    case NE_EXPR:  /* - [c, c]  */

	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;

	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;

	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;

	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;

	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))

	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))

		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);

	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;

	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))

	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))

	    low = n_low, high = n_high;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))

	  n_low = low, n_high = high;

	    n_low = convert (type, n_low);

	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))

	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */

		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
		high_positive = fold (build (RSHIFT_EXPR, type,
					     convert (type, high_positive),
					     convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */

		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, convert (type, integer_zero_node),

		  in_p = (n_in_p == in_p);

		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, convert (type, integer_zero_node),

		  in_p = (in_p != n_in_p);

	  low = n_low, high = n_high;

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)

      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,

		      && integer_onep (range_binop (LE_EXPR, integer_type_node,

  *pin_p = in_p, *plow = low, *phigh = high;
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */
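
/* For example, with IN_P = 1, LOW = 2 and HIGH = 5, this produces the
   single comparison (unsigned) (EXP - 2) <= 3 via the subtract-and-
   compare trick described above, rather than a pair of comparisons.  */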
build_range_check (tree type, tree exp, int in_p, tree low, tree high)

  tree etype = TREE_TYPE (exp);

      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))

      if (! TREE_UNSIGNED (etype))

	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);

      return build_range_check (type, exp, 1, 0, high);

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)

      unsigned HOST_WIDE_INT lo;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)

	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;

	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)

	  if (TREE_UNSIGNED (etype))

	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);

	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
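
/* For example, merging "+ [2, 5]" and "+ [4, 9]" when both must hold
   yields "+ [4, 5]", while "- [2, 5]" and "- [6, 9]" merge to
   "- [2, 9]" because the two excluded ranges are adjacent.  */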
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)

  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,

	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))

      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */

      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */

	in_p = 0, low = high = 0;

	in_p = 1, low = low1, high = high1;

	in_p = 1, low = low1, high = high0;

  else if (in0_p && ! in1_p)

      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */

	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)

	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);

      else if (! subset || highequal)

	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);

  else if (! in0_p && in1_p)

      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */

	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;

	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);

      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */

	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,

						      integer_one_node, 1),

	    in_p = 0, low = low0, high = high1;

	in_p = 0, low = low0, high = high0;

	in_p = 0, low = low0, high = high1;

  *pin_p = in_p, *plow = low, *phigh = high;
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */
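
/* The classic case is something like "ch >= '0' && ch <= '9'": both
   operands describe ranges of the same variable, so the pair of tests
   can be merged and emitted as one unsigned range check.  */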
fold_range_test (tree exp)

  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,

      && 0 != (tem = (build_range_check (TREE_TYPE (exp),

				  : rhs != 0 ? rhs : integer_zero_node,

    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))

      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))

	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,

	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,

	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
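
/* For instance, with P = 4 in an 8-bit mode, the properly sign-extended
   constant 0xFE (-2 in the 4-bit field) unextends to 0x0E, leaving the
   extra bits zero, while a non-sign-extended 0x0E unextends to 0xFE, so
   the eventual masked equality test correctly fails for it.  */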
unextend (tree c, int p, int unsignedp, tree mask)

  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));

  if (p == modesize || unsignedp)

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert ((*lang_hooks.types.signed_type) (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);

    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);

  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
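
/* For instance, if A and B are adjacent char-sized fields packed into
   one word, "p->a == 2 && p->b == 4" can often be compiled as a single
   load of the containing word W plus one test of the form
	(W & MASK) == CONST
   where MASK selects both fields and CONST packs 2 and 4 into their
   respective bit positions (details depend on endianness and layout).  */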
3720 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
3722 /* If this is the "or" of two comparisons, we can do something if
3723 the comparisons are NE_EXPR. If this is the "and", we can do something
3724 if the comparisons are EQ_EXPR. I.e.,
3725 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3727 WANTED_CODE is this operation code. For single bit fields, we can
3728 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3729 comparison for one-bit fields. */
3731 enum tree_code wanted_code
;
3732 enum tree_code lcode
, rcode
;
3733 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
3734 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
3735 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
3736 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
3737 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
3738 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
3739 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
3740 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
3741 enum machine_mode lnmode
, rnmode
;
3742 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
3743 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
3744 tree l_const
, r_const
;
3745 tree lntype
, rntype
, result
;
3746 int first_bit
, end_bit
;
3749 /* Start by getting the comparison codes. Fail if anything is volatile.
3750 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3751 it were surrounded with a NE_EXPR. */
3753 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
3756 lcode
= TREE_CODE (lhs
);
3757 rcode
= TREE_CODE (rhs
);
3759 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
3760 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
3762 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
3763 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
3765 if (TREE_CODE_CLASS (lcode
) != '<' || TREE_CODE_CLASS (rcode
) != '<')
3768 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
3769 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
3771 ll_arg
= TREE_OPERAND (lhs
, 0);
3772 lr_arg
= TREE_OPERAND (lhs
, 1);
3773 rl_arg
= TREE_OPERAND (rhs
, 0);
3774 rr_arg
= TREE_OPERAND (rhs
, 1);
3776 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3777 if (simple_operand_p (ll_arg
)
3778 && simple_operand_p (lr_arg
)
3779 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg
)))
3783 if (operand_equal_p (ll_arg
, rl_arg
, 0)
3784 && operand_equal_p (lr_arg
, rr_arg
, 0))
3786 int lcompcode
, rcompcode
;
3788 lcompcode
= comparison_to_compcode (lcode
);
3789 rcompcode
= comparison_to_compcode (rcode
);
3790 compcode
= (code
== TRUTH_AND_EXPR
)
3791 ? lcompcode
& rcompcode
3792 : lcompcode
| rcompcode
;
3794 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
3795 && operand_equal_p (lr_arg
, rl_arg
, 0))
3797 int lcompcode
, rcompcode
;
3799 rcode
= swap_tree_comparison (rcode
);
3800 lcompcode
= comparison_to_compcode (lcode
);
3801 rcompcode
= comparison_to_compcode (rcode
);
3802 compcode
= (code
== TRUTH_AND_EXPR
)
3803 ? lcompcode
& rcompcode
3804 : lcompcode
| rcompcode
;
3809 if (compcode
== COMPCODE_TRUE
)
3810 return convert (truth_type
, integer_one_node
);
3811 else if (compcode
== COMPCODE_FALSE
)
3812 return convert (truth_type
, integer_zero_node
);
3813 else if (compcode
!= -1)
3814 return build (compcode_to_comparison (compcode
),
3815 truth_type
, ll_arg
, lr_arg
);
3818 /* If the RHS can be evaluated unconditionally and its operands are
3819 simple, it wins to evaluate the RHS unconditionally on machines
3820 with expensive branches. In this case, this isn't a comparison
3821 that can be merged. Avoid doing this if the RHS is a floating-point
3822 comparison since those can trap. */
3824 if (BRANCH_COST
>= 2
3825 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
3826 && simple_operand_p (rl_arg
)
3827 && simple_operand_p (rr_arg
))
3829 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3830 if (code
== TRUTH_OR_EXPR
3831 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
3832 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
3833 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
3834 return build (NE_EXPR
, truth_type
,
3835 build (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
3839 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3840 if (code
== TRUTH_AND_EXPR
3841 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
3842 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
3843 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
3844 return build (EQ_EXPR
, truth_type
,
3845 build (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
3849 return build (code
, truth_type
, lhs
, rhs
);
3852 /* See if the comparisons can be merged. Then get all the parameters for
3855 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
3856 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
3860 ll_inner
= decode_field_reference (ll_arg
,
3861 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
3862 &ll_unsignedp
, &volatilep
, &ll_mask
,
3864 lr_inner
= decode_field_reference (lr_arg
,
3865 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
3866 &lr_unsignedp
, &volatilep
, &lr_mask
,
3868 rl_inner
= decode_field_reference (rl_arg
,
3869 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
3870 &rl_unsignedp
, &volatilep
, &rl_mask
,
3872 rr_inner
= decode_field_reference (rr_arg
,
3873 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
3874 &rr_unsignedp
, &volatilep
, &rr_mask
,
3877 /* It must be true that the inner operation on the lhs of each
3878 comparison must be the same if we are to be able to do anything.
3879 Then see if we have constants. If not, the same must be true for
3881 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
3882 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
3885 if (TREE_CODE (lr_arg
) == INTEGER_CST
3886 && TREE_CODE (rr_arg
) == INTEGER_CST
)
3887 l_const
= lr_arg
, r_const
= rr_arg
;
3888 else if (lr_inner
== 0 || rr_inner
== 0
3889 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
3892 l_const
= r_const
= 0;
3894 /* If either comparison code is not correct for our logical operation,
3895 fail. However, we can convert a one-bit comparison against zero into
3896 the opposite comparison against that bit being set in the field. */
3898 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
3899 if (lcode
!= wanted_code
)
3901 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
3903 /* Make the left operand unsigned, since we are only interested
3904 in the value of one bit. Otherwise we are doing the wrong
3913 /* This is analogous to the code for l_const above. */
3914 if (rcode
!= wanted_code
)
3916 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
3925 /* After this point all optimizations will generate bit-field
3926 references, which we might not want. */
3927 if (! (*lang_hooks
.can_use_bit_fields_p
) ())
3930 /* See if we can find a mode that contains both fields being compared on
3931 the left. If we can't, fail. Otherwise, update all constants and masks
3932 to be relative to a field of that size. */
3933 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
3934 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
3935 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
3936 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
3938 if (lnmode
== VOIDmode
)
3941 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
3942 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
3943 lntype
= (*lang_hooks
.types
.type_for_size
) (lnbitsize
, 1);
3944 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
3946 if (BYTES_BIG_ENDIAN
)
3948 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
3949 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
3952 ll_mask
= const_binop (LSHIFT_EXPR
, convert (lntype
, ll_mask
),
3953 size_int (xll_bitpos
), 0);
3954 rl_mask
= const_binop (LSHIFT_EXPR
, convert (lntype
, rl_mask
),
3955 size_int (xrl_bitpos
), 0);
3959 l_const
= convert (lntype
, l_const
);
3960 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
3961 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
3962 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
3963 fold (build1 (BIT_NOT_EXPR
,
3967 warning ("comparison is always %d", wanted_code
== NE_EXPR
);
3969 return convert (truth_type
,
3970 wanted_code
== NE_EXPR
3971 ? integer_one_node
: integer_zero_node
);
3976 r_const
= convert (lntype
, r_const
);
3977 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
3978 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
3979 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
3980 fold (build1 (BIT_NOT_EXPR
,
3984 warning ("comparison is always %d", wanted_code
== NE_EXPR
);
3986 return convert (truth_type
,
3987 wanted_code
== NE_EXPR
3988 ? integer_one_node
: integer_zero_node
);
3992 /* If the right sides are not constant, do the same for it. Also,
3993 disallow this optimization if a size or signedness mismatch occurs
3994 between the left and right sides. */
3997 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
3998 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
3999 /* Make sure the two fields on the right
4000 correspond to the left without being swapped. */
4001 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
4004 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
4005 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
4006 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
4007 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
4009 if (rnmode
== VOIDmode
)
4012 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
4013 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
4014 rntype
= (*lang_hooks
.types
.type_for_size
) (rnbitsize
, 1);
4015 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
4017 if (BYTES_BIG_ENDIAN
)
4019 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
4020 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
4023 lr_mask
= const_binop (LSHIFT_EXPR
, convert (rntype
, lr_mask
),
4024 size_int (xlr_bitpos
), 0);
4025 rr_mask
= const_binop (LSHIFT_EXPR
, convert (rntype
, rr_mask
),
4026 size_int (xrr_bitpos
), 0);
4028 /* Make a mask that corresponds to both fields being compared.
4029 Do this for both items being compared. If the operands are the
4030 same size and the bits being compared are in the same position
4031 then we can do this by masking both and comparing the masked
4033 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
4034 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
4035 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
4037 lhs
= make_bit_field_ref (ll_inner
, lntype
, lnbitsize
, lnbitpos
,
4038 ll_unsignedp
|| rl_unsignedp
);
4039 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
4040 lhs
= build (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
4042 rhs
= make_bit_field_ref (lr_inner
, rntype
, rnbitsize
, rnbitpos
,
4043 lr_unsignedp
|| rr_unsignedp
);
4044 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
4045 rhs
= build (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
4047 return build (wanted_code
, truth_type
, lhs
, rhs
);
      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = convert (rntype, lhs);
		  ll_mask = convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = convert (lntype, rhs);
		  lr_mask = convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, lntype, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      abort ();
    }
}
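
/* Worked example (illustrative only, not compiler code): for T being
   `MAX (x, 4) > 4', op_code is MAX_EXPR, minmax_const is 4 and
   comp_const is 4, so consts_equal holds and the GT_EXPR case returns
   the simpler `x > 4'.  The identity can be checked directly in C:

     int lhs = (x > 4 ? x : 4) > 4;    MAX (x, 4) > 4
     int rhs = x > 4;                  folded form

   lhs == rhs for every int x.  */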
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */
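
/* Illustrative sketch (not compiler code) of the example above: for
   (x * 8 + y * 16) / 4 with signed int operands, the routine below
   distributes the division into both multiplications and returns the
   equivalent of

     x * 2 + y * 4

   which is valid because signed overflow is undefined in C, so the
   original expression may be assumed not to have overflowed.  */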
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
      break;
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TREE_UNSIGNED (ctype)
		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;
    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;

    case WITH_RECORD_EXPR:
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, convert (ctype, op1),
			     convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
   S, a SAVE_EXPR, return the expression actually being evaluated.  Note
   that we may sometimes modify the tree.  */

static tree
strip_compound_expr (tree t, tree s)
{
  enum tree_code code = TREE_CODE (t);

  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
    return TREE_OPERAND (t, 1);

  /* See if this is a COND_EXPR or a simple arithmetic operator.  We
     don't bother handling any other types.  */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
  else if (TREE_CODE_CLASS (code) == '1')
    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
	   || TREE_CODE_CLASS (code) == '2')
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

static tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node
						      : integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Utility function for the following routine, to see how complex a nesting of
   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */

static int
count_cond (tree expr, int lim)
{
  int ctrue, cfalse;

  if (TREE_CODE (expr) != COND_EXPR)
    return 0;
  else if (lim <= 0)
    return 0;

  ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
  cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
  return MIN (lim, 1 + ctrue + cfalse);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */
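
/* For instance (illustrative only): with CODE == PLUS_EXPR, COND ==
   `(b ? x : y)', ARG == `a' and cond_first_p == 0, the expression is
   rebuilt as `b ? (a + x) : (a + y)'; if either arm then folds to a
   constant, the resulting COND_EXPR may simplify further.  In C terms:

     int before = a + (b ? x : y);
     int after  = b ? (a + x) : (a + y);   same value either way  */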
static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  int save = 0;

  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate it once.  Though
     this is only required if it is volatile, it might be more efficient even
     if it is not.  However, if we succeed in folding one part to a constant,
     we do not need to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this SAVE_EXPR
     interferes with later optimizations, suppressing it when we can is
     important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
     do so.  Don't try to see if the result is a constant if an arm is a
     COND_EXPR since we get exponential behavior in that case.  */

  if (saved_expr_p (arg))
    save = 1;
  else if (lhs == 0 && rhs == 0
	   && !TREE_CONSTANT (arg)
	   && (*lang_hooks.decls.global_bindings_p) () == 0
	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
	       || TREE_SIDE_EFFECTS (arg)))
    {
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = 1;
	}
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  if (save)
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
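
/* Concrete illustration (not compiler code) of why the sign of zero
   matters under the default round-to-nearest mode:

     double x = -0.0;
     double a = x + 0.0;    becomes +0.0, so folding x + 0 -> x is wrong
     double b = x - 0.0;    stays  -0.0, so folding x - 0 -> x is safe

   hence only the subtraction (NEGATE) direction may return true once
   signed zeros must be honored, and even that is disabled when the
   rounding mode can make 0 - 0 yield -0.  */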
static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
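
/* Example of the rewrites performed below (illustrative only): under
   the usual -ffast-math assumptions, `sqrt (x) < 2.0' becomes
   `x >= 0.0 && x < 4.0' (or just `x < 4.0' when NaNs are ignored),
   and `sqrt (x) > -1.0' becomes `x >= 0.0', avoiding the library call
   entirely.  */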
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (fcode == BUILT_IN_SQRT
      || fcode == BUILT_IN_SQRTF
      || fcode == BUILT_IN_SQRTL)
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type,
				     convert (type, integer_zero_node),
				     arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type,
				     convert (type, integer_one_node),
				     arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type,
				       convert (type, integer_zero_node),
				       arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build (code, type, arg,
			      build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type,
					 convert (type, integer_one_node),
					 arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if ((*lang_hooks.decls.global_bindings_p) () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build (code, type, arg,
				build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if ((*lang_hooks.decls.global_bindings_p) () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
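
/* Example (illustrative only): for IEEE double, `x < HUGE_VAL' is
   rewritten as `x <= DBL_MAX' and `x >= HUGE_VAL' as `x > DBL_MAX';
   comparisons against -HUGE_VAL first have their sense swapped by
   swap_tree_comparison below and are then handled the same way.  */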
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type,
			       convert (type, integer_zero_node),
			       arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type,
				 convert (type, integer_one_node),
				 arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if ((*lang_hooks.decls.global_bindings_p) () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max)));
      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */
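
/* Sketch of the rewrite performed here (illustrative, not compiler
   code): a single-bit test such as

     (x & 8) != 0

   becomes

     (x >> 3) & 1

   and the EQ_EXPR form gains a BIT_XOR_EXPR with 1, so that
   (x & 8) == 0 turns into ((x >> 3) ^ 1) & 1.  */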
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE)
	{
	  tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
	  return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
			      convert (stype, arg00),
			      convert (stype, integer_zero_node)));
	}

      /* At this point, we know that arg0 is not testing the sign bit.  */
      if (TYPE_PRECISION (type) - 1 == bitnum)
	abort ();

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
      unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build (RSHIFT_EXPR, intermediate_type,
		       inner, size_int (bitnum));

      if (code == EQ_EXPR)
	inner = build (BIT_XOR_EXPR, intermediate_type,
		       inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build (BIT_AND_EXPR, intermediate_type,
		     inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */
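
/* For example (illustrative only), `1 + x' is canonicalized to
   `x + 1' because ARG0 is an INTEGER_CST and ARG1 is not; fold's
   commutative-operator handling below relies on this predicate so
   that later patterns only need to look for a constant in the second
   operand.  */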
static bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  return 0;
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the C type of the overall expression)
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */
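
/* A few of the identities fold applies, written out (illustrative
   only):

     x * 1         =>  x
     x * 0         =>  0          (when x has no side effects)
     a + (-b)      =>  a - b
     (x & 4) != 0  =>  (x >> 2) & 1

   Callers construct a tree with build/build1 and pass it through
   fold; the result is semantically equivalent and no more complex.  */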
#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  tree t = expr, orig_t;
  tree t1 = NULL_TREE;
  tree tem;
  tree type = TREE_TYPE (expr);
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  int kind = TREE_CODE_CLASS (code);
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  /* Don't try to process an RTL_EXPR since its operands aren't trees.
     Likewise for a SAVE_EXPR that's already been evaluated.  */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
    return t;

  /* Return right away if a constant.  */
  if (kind == 'c')
    return t;

  orig_t = t;

  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
	STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = first_rtl_op (code);
      int i;

      for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */

	  if (kind == '<' || code == RSHIFT_EXPR)
	    {
	      /* Signedness matters here.  Perhaps we can refine this
		 later.  */
	      STRIP_SIGN_NOPS (op);
	    }
	  else
	    /* Strip any conversions that don't change the mode.  */
	    STRIP_NOPS (op);

	  if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;

	  if (TREE_CODE (subop) != INTEGER_CST
	      && TREE_CODE (subop) != REAL_CST)
	    /* Note that TREE_CONSTANT isn't enough:
	       static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
       || code == BIT_AND_EXPR)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build (code, type, arg1, arg0));

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
		       : TRUTH_XOR_EXPR,
		       type, arg0, arg1));

      if (code == EQ_EXPR)
	t = invert_truthvalue (t);

      return t;
    }

  if (TREE_CODE_CLASS (code) == '1')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold (build1 (code, type, arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold (build1 (code, type, arg02));
	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			   arg01, arg02));

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (t) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
		    && INTEGRAL_TYPE_P
		       (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
	    t = build1 (code, type,
			build (COND_EXPR,
			       TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (t, 1), 0)),
			       TREE_OPERAND (t, 0),
			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
	  return t;
	}
      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
	return fold (build (COND_EXPR, type, arg0,
			    fold (build1 (code, type, integer_one_node)),
			    fold (build1 (code, type, integer_zero_node))));
    }
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == '2'
	   || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
	  && ! TREE_SIDE_EFFECTS (arg0))
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		      fold (build (code, type,
				   arg0, TREE_OPERAND (arg1, 1))));
      else if ((TREE_CODE (arg1) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg0) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg0)
		   || ((*lang_hooks.decls.global_bindings_p) () == 0
		       && ! CONTAINS_PLACEHOLDER_P (arg0))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
					       /*cond_first_p=*/0);
      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
      else if ((TREE_CODE (arg0) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg1) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg1)
		   || ((*lang_hooks.decls.global_bindings_p) () == 0
		       && ! CONTAINS_PLACEHOLDER_P (arg1))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
					       /*cond_first_p=*/1);
    }

  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Other kinds of FIX are not handled properly by fold_convert.  */

      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
	return TREE_OPERAND (t, 0);

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
	  tree final_type = TREE_TYPE (t);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (final_type);
	  int final_ptr = POINTER_TYPE_P (final_type);
	  int final_float = FLOAT_TYPE_P (final_type);
	  unsigned int final_prec = TYPE_PRECISION (final_type);
	  int final_unsignedp = TREE_UNSIGNED (final_type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	}

      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  if (t == orig_t)
	    t = copy_node (t);
	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
	  TREE_USED (t) = 1;
	  return t;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TREE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (TREE_TYPE (t))
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
		  and0 = convert (uns, and0);
		  and1 = convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
				convert (TREE_TYPE (t), and0),
				convert (TREE_TYPE (t), and1)));
	}

      if (!wins)
	{
	  if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
	    {
	      if (t == orig_t)
		t = copy_node (t);
	      TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
	    }
	  return t;
	}
      return fold_convert (t, arg0);
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    t = TREE_VALUE (m);
	}
      return t;

    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
	{
	  if (t == orig_t)
	    t = copy_node (t);
	  TREE_CONSTANT (t) = wins;
	}
      return t;

    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return negate_expr (arg0);
      return t;

    case ABS_EXPR:
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      /* If the value is unsigned, then the absolute value is
		 the same as the ordinary value.  */
	      if (TREE_UNSIGNED (type))
		return arg0;
	      /* Similarly, if the value is non-negative.  */
	      else if (INT_CST_LT (integer_minus_one_node, arg0))
		return arg0;
	      /* If the value is negative, then the absolute value is
		 its negation.  */
	      else
		{
		  unsigned HOST_WIDE_INT low;
		  HOST_WIDE_INT high;
		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					     TREE_INT_CST_HIGH (arg0),
					     &low, &high);
		  t = build_int_2 (low, high);
		  TREE_TYPE (t) = type;
		  TREE_OVERFLOW (t)
		    = (TREE_OVERFLOW (arg0)
		       | force_fit_type (t, overflow));
		  TREE_CONSTANT_OVERFLOW (t)
		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
		}
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    {
	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
		t = build_real (type,
				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	    }
	}
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
						targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;
      return t;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build (COMPLEX_EXPR, type,
		      TREE_OPERAND (arg0, 0),
		      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (CONJ_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (CONJ_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case BIT_NOT_EXPR:
      if (wins)
	{
	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
			   ~ TREE_INT_CST_HIGH (arg0));
	  TREE_TYPE (t) = type;
	  force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
5709 /* A + (-B) -> A - B */
5710 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
5711 return fold (build (MINUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
5712 /* (-A) + B -> B - A */
5713 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
5714 return fold (build (MINUS_EXPR
, type
, arg1
, TREE_OPERAND (arg0
, 0)));
5715 else if (! FLOAT_TYPE_P (type
))
5717 if (integer_zerop (arg1
))
5718 return non_lvalue (convert (type
, arg0
));
5720 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5721 with a constant, and the two constants have no bits in common,
5722 we should treat this as a BIT_IOR_EXPR since this may produce more
5724 if (TREE_CODE (arg0
) == BIT_AND_EXPR
5725 && TREE_CODE (arg1
) == BIT_AND_EXPR
5726 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
5727 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
5728 && integer_zerop (const_binop (BIT_AND_EXPR
,
5729 TREE_OPERAND (arg0
, 1),
5730 TREE_OPERAND (arg1
, 1), 0)))
5732 code
= BIT_IOR_EXPR
;
5736 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5737 (plus (plus (mult) (mult)) (foo)) so that we can
5738 take advantage of the factoring cases below. */
5739 if ((TREE_CODE (arg0
) == PLUS_EXPR
5740 && TREE_CODE (arg1
) == MULT_EXPR
)
5741 || (TREE_CODE (arg1
) == PLUS_EXPR
5742 && TREE_CODE (arg0
) == MULT_EXPR
))
5744 tree parg0
, parg1
, parg
, marg
;
5746 if (TREE_CODE (arg0
) == PLUS_EXPR
)
5747 parg
= arg0
, marg
= arg1
;
5749 parg
= arg1
, marg
= arg0
;
5750 parg0
= TREE_OPERAND (parg
, 0);
5751 parg1
= TREE_OPERAND (parg
, 1);
5755 if (TREE_CODE (parg0
) == MULT_EXPR
5756 && TREE_CODE (parg1
) != MULT_EXPR
)
5757 return fold (build (PLUS_EXPR
, type
,
5758 fold (build (PLUS_EXPR
, type
,
5759 convert (type
, parg0
),
5760 convert (type
, marg
))),
5761 convert (type
, parg1
)));
5762 if (TREE_CODE (parg0
) != MULT_EXPR
5763 && TREE_CODE (parg1
) == MULT_EXPR
)
5764 return fold (build (PLUS_EXPR
, type
,
5765 fold (build (PLUS_EXPR
, type
,
5766 convert (type
, parg1
),
5767 convert (type
, marg
))),
5768 convert (type
, parg0
)));
5771 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
)
5773 tree arg00
, arg01
, arg10
, arg11
;
5774 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
5776 /* (A * C) + (B * C) -> (A+B) * C.
5777 We are most concerned about the case where C is a constant,
5778 but other combinations show up during loop reduction. Since
5779 it is not difficult, try all four possibilities. */
5781 arg00
= TREE_OPERAND (arg0
, 0);
5782 arg01
= TREE_OPERAND (arg0
, 1);
5783 arg10
= TREE_OPERAND (arg1
, 0);
5784 arg11
= TREE_OPERAND (arg1
, 1);
5787 if (operand_equal_p (arg01
, arg11
, 0))
5788 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
5789 else if (operand_equal_p (arg00
, arg10
, 0))
5790 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
5791 else if (operand_equal_p (arg00
, arg11
, 0))
5792 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
5793 else if (operand_equal_p (arg01
, arg10
, 0))
5794 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
5796 /* No identical multiplicands; see if we can find a common
5797 power-of-two factor in non-power-of-two multiplies. This
5798 can help in multi-dimensional array access. */
5799 else if (TREE_CODE (arg01
) == INTEGER_CST
5800 && TREE_CODE (arg11
) == INTEGER_CST
5801 && TREE_INT_CST_HIGH (arg01
) == 0
5802 && TREE_INT_CST_HIGH (arg11
) == 0)
5804 HOST_WIDE_INT int01
, int11
, tmp
;
5805 int01
= TREE_INT_CST_LOW (arg01
);
5806 int11
= TREE_INT_CST_LOW (arg11
);
5808 /* Move min of absolute values to int11. */
5809 if ((int01
>= 0 ? int01
: -int01
)
5810 < (int11
>= 0 ? int11
: -int11
))
5812 tmp
= int01
, int01
= int11
, int11
= tmp
;
5813 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
5814 alt0
= arg01
, arg01
= arg11
, arg11
= alt0
;
5817 if (exact_log2 (int11
) > 0 && int01
% int11
== 0)
5819 alt0
= fold (build (MULT_EXPR
, type
, arg00
,
5820 build_int_2 (int01
/ int11
, 0)));
	      if (same)
		return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type,
						 alt0, alt1)),
				    same));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (convert (type, arg1));

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold (build (MULT_EXPR, type, arg0,
				build_real (type, dconst2)));

	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build (MULT_EXPR, type, arg1,
				  build_real (type, c)));
	    }

	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build (MULT_EXPR, type, arg0,
				  build_real (type, c)));
	    }
	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold (build (MULT_EXPR, type,
				  TREE_OPERAND (arg0, 0),
				  build_real (type, c1)));
	    }
	}

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
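	    /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
	       satisfies 3 + 29 == 32 and is folded to a left rotation of
	       A by 3 bits.  */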
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }

    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);
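	  /* For example, when folding (x + 3) - (y - 5), the two literals
	     3 and 5 end up in the same group and can be combined into 8
	     even though they were not adjacent in the original tree.  */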
	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0 && var0 == 0)
		    return convert (type,
				    associate_trees (var0, minus_lit0,
						     MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return convert (type,
				      associate_trees (var0, con0,
						       MINUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return convert (type, associate_trees (var0, con0, code, type));
	    }
	}

    binary:
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 0))),
				    arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 1))),
				    arg0));
	    }
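	  /* The identity holds because the bits of A & B are a subset of
	     the bits of A, so the subtraction simply clears them, e.g.
	     A = 0b1011, B = 0b0110 gives A - (A & B) = 0b1001 = ~B & A.  */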
	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1));
		  return fold (build (MINUS_EXPR, type, tem, mask1));
		}
	    }
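	  /* For example, with B = 0b0111, A splits into a high part
	     H = A & ~B and a low part L = A & B; then A ^ B is H + (B - L),
	     so (A ^ B) - B equals H - L without needing ~B as a separate
	     constant.  */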
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return convert (type, integer_zero_node);

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (MULT_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (MULT_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     convert (type, arg1),
					     code, NULL_TREE)))
	    return convert (type, tem);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold (build1 (NEGATE_EXPR, type, arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1)));
	    }

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of sqrt(...)*sqrt(...).  */
	      if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
		  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
		  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
		{
		  tree sqrtfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
		  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (sqrtfn, arglist);
		}
	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1
		  && (fcode0 == BUILT_IN_EXP
		      || fcode0 == BUILT_IN_EXPF
		      || fcode0 == BUILT_IN_EXPL
		      || fcode0 == BUILT_IN_EXP2
		      || fcode0 == BUILT_IN_EXP2F
		      || fcode0 == BUILT_IN_EXP2L
		      || fcode0 == BUILT_IN_EXP10
		      || fcode0 == BUILT_IN_EXP10F
		      || fcode0 == BUILT_IN_EXP10L
		      || fcode0 == BUILT_IN_POW10
		      || fcode0 == BUILT_IN_POW10F
		      || fcode0 == BUILT_IN_POW10L))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build (PLUS_EXPR, type,
				    TREE_VALUE (TREE_OPERAND (arg0, 1)),
				    TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn;

		  switch (fcode0)
		    {
		    case BUILT_IN_TAN:
		    case BUILT_IN_COS:
		      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
		      break;
		    case BUILT_IN_TANF:
		    case BUILT_IN_COSF:
		      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
		      break;
		    case BUILT_IN_TANL:
		    case BUILT_IN_COSL:
		      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
		      break;
		    default:
		      sinfn = NULL_TREE;
		    }

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = NULL_TREE;

		  if (type == double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POW];
		  else if (type == float_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWF];
		  else if (type == long_double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWL];

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}

      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold (build (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1))));
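      /* For example, (a/b)/c becomes a/(b*c), trading one division for a
	 multiplication; this needs -funsafe-math-optimizations because
	 the intermediate rounding differs.  */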
      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build (MULT_EXPR, type,
			    fold (build (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0))),
			    TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0)));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (fcode == BUILT_IN_EXP
	      || fcode == BUILT_IN_EXPF
	      || fcode == BUILT_IN_EXPL
	      || fcode == BUILT_IN_EXP2
	      || fcode == BUILT_IN_EXP2F
	      || fcode == BUILT_IN_EXP2L
	      || fcode == BUILT_IN_EXP10
	      || fcode == BUILT_IN_EXP10F
	      || fcode == BUILT_IN_EXP10L
	      || fcode == BUILT_IN_POW10
	      || fcode == BUILT_IN_POW10F
	      || fcode == BUILT_IN_POW10L)
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = build1 (NEGATE_EXPR, type,
				 TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE, fold (arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }
	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_SIN)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_SINF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_SINL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }
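	  /* This relies on tan(x) == sin(x)/cos(x); implicit_built_in_decls
	     supplies the tan variant matching the operand type, and the
	     rewrite is skipped when no such declaration is available.  */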
	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_COS)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_COSF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_COSL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build (RDIV_EXPR, type,
				      build_real (type, dconst1),
				      tmp));
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */
    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
	  tem = convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold (build (RROTATE_EXPR, type, arg0, tem));
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));
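      /* For example, in an 8-bit type, rotating (x & 0xF0) right by 4
	 gives the same bits as (x rotated right by 4) & 0x0F: the rotate
	 distributes over the bit operation once the constant operand is
	 rotated by the same amount.  */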
      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	{
	  tem = fold_single_bit_test (code, arg0, arg1, type);
	  if (tem)
	    return tem;
	  return t;
	}
      return convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a01,
				fold (build (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build (TREE_CODE (arg0), type,
				fold (build (code, type, a00, a10)),
				a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (convert (type, invert_truthvalue (arg0)));
      return t;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold (build (swap_tree_comparison (code), type, arg1, arg0));
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold (build (code, type, convert (newtype, targ0),
				convert (newtype, targ1)));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0)));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold (build (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst))));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold (build (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0)));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, convert (type, t), arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem));

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.
	 First, see if one arg is constant; find the constant arg
	 and the other one.  */
      {
	tree constop = 0, varop = NULL_TREE;
	int constopnum = -1;

	if (TREE_CONSTANT (arg1))
	  constopnum = 1, constop = arg1, varop = arg0;
	if (TREE_CONSTANT (arg0))
	  constopnum = 0, constop = arg0, varop = arg1;

	if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
	  {
	    /* This optimization is invalid for ordered comparisons
	       if CONST+INCR overflows or if foo+incr might overflow.
	       This optimization is invalid for floating point due to rounding.
	       For pointer types we assume overflow doesn't happen.  */
	    if (POINTER_TYPE_P (TREE_TYPE (varop))
		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
		    && (code == EQ_EXPR || code == NE_EXPR)))
	      {
		tree newconst
		  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
				 constop, TREE_OPERAND (varop, 1)));

		/* Do not overwrite the current varop to be a preincrement,
		   create a new node so that we won't confuse our caller who
		   might create trees and throw them away, reusing the
		   arguments that they passed to build.  This shows up in
		   the THEN or ELSE parts of ?: being postincrements.  */
		varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
			       TREE_OPERAND (varop, 0),
			       TREE_OPERAND (varop, 1));

		/* If VAROP is a reference to a bitfield, we must mask
		   the constant by the width of the field.  */
		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
		    && DECL_BIT_FIELD (TREE_OPERAND
				       (TREE_OPERAND (varop, 0), 1)))
		  {
		    int size
		      = TREE_INT_CST_LOW (DECL_SIZE
					  (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)));
		    tree mask, unsigned_type;
		    unsigned int precision;
		    tree folded_compare;

		    /* First check whether the comparison would come out
		       always the same.  If we don't do that we would
		       change the meaning with the masking.  */
		    if (constopnum == 0)
		      folded_compare = fold (build (code, type, constop,
						    TREE_OPERAND (varop, 0)));
		    else
		      folded_compare = fold (build (code, type,
						    TREE_OPERAND (varop, 0),
						    constop));
		    if (integer_zerop (folded_compare)
			|| integer_onep (folded_compare))
		      return omit_one_operand (type, folded_compare, varop);
		    unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
		    precision = TYPE_PRECISION (unsigned_type);
		    mask = build_int_2 (~0, ~0);
		    TREE_TYPE (mask) = unsigned_type;
		    force_fit_type (mask, 0);
		    mask = const_binop (RSHIFT_EXPR, mask,
					size_int (precision - size), 0);
		    newconst = fold (build (BIT_AND_EXPR,
					    TREE_TYPE (varop), newconst,
					    convert (TREE_TYPE (varop),
						     mask)));
		  }

		t = build (code, type,
			   (constopnum == 0) ? newconst : varop,
			   (constopnum == 1) ? newconst : varop);
		return t;
	      }
	  }
	else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
	  {
	    if (POINTER_TYPE_P (TREE_TYPE (varop))
		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
		    && (code == EQ_EXPR || code == NE_EXPR)))
	      {
		tree newconst
		  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
				 constop, TREE_OPERAND (varop, 1)));

		/* Do not overwrite the current varop to be a predecrement,
		   create a new node so that we won't confuse our caller who
		   might create trees and throw them away, reusing the
		   arguments that they passed to build.  This shows up in
		   the THEN or ELSE parts of ?: being postdecrements.  */
		varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
			       TREE_OPERAND (varop, 0),
			       TREE_OPERAND (varop, 1));

		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
		    && DECL_BIT_FIELD (TREE_OPERAND
				       (TREE_OPERAND (varop, 0), 1)))
		  {
		    int size
		      = TREE_INT_CST_LOW (DECL_SIZE
					  (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)));
		    tree mask, unsigned_type;
		    unsigned int precision;
		    tree folded_compare;

		    if (constopnum == 0)
		      folded_compare = fold (build (code, type, constop,
						    TREE_OPERAND (varop, 0)));
		    else
		      folded_compare = fold (build (code, type,
						    TREE_OPERAND (varop, 0),
						    constop));
		    if (integer_zerop (folded_compare)
			|| integer_onep (folded_compare))
		      return omit_one_operand (type, folded_compare, varop);

		    unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
		    precision = TYPE_PRECISION (unsigned_type);
		    mask = build_int_2 (~0, ~0);
		    TREE_TYPE (mask) = TREE_TYPE (varop);
		    force_fit_type (mask, 0);
		    mask = const_binop (RSHIFT_EXPR, mask,
					size_int (precision - size), 0);
		    newconst = fold (build (BIT_AND_EXPR,
					    TREE_TYPE (varop), newconst,
					    convert (TREE_TYPE (varop),
						     mask)));
		  }

		t = build (code, type,
			   (constopnum == 0) ? newconst : varop,
			   (constopnum == 1) ? newconst : varop);
		return t;
	      }
	  }
      }

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build (GT_EXPR, type, arg0, arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build (LE_EXPR, type, arg0, arg1));

	    default:
	      break;
	    }
	}
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    unsigned HOST_WIDE_INT signed_max;
	    unsigned HOST_WIDE_INT max, min;

	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
	      {
		max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		min = 0;
	      }
	    else
	      {
		max = signed_max;
		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	      }

	    if (TREE_INT_CST_HIGH (arg1) == 0
		&& TREE_INT_CST_LOW (arg1) == max)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_zero_node),
					   arg0);
		case GE_EXPR:
		  return fold (build (EQ_EXPR, type, arg0, arg1));

		case LE_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_one_node),
					   arg0);
		case LT_EXPR:
		  return fold (build (NE_EXPR, type, arg0, arg1));

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == max - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (EQ_EXPR, type, arg0, arg1));
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (NE_EXPR, type, arg0, arg1));

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_zero_node),
					   arg0);
		case LE_EXPR:
		  return fold (build (EQ_EXPR, type, arg0, arg1));

		case GE_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_one_node),
					   arg0);
		case GT_EXPR:
		  return fold (build (NE_EXPR, type, arg0, arg1));

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (NE_EXPR, type, arg0, arg1));
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (EQ_EXPR, type, arg0, arg1));
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == signed_max
		     && TREE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
		    return fold
		      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			      type, convert (st0, arg0),
			      convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));

      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR
	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
		   || (TREE_CODE (t1) == INTEGER_CST
		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
	return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return optimize_minmax_comparison (t);
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (TRUTH_ANDIF_EXPR, type,
			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
			    build (LE_EXPR, type,
				   TREE_OPERAND (arg0, 0), arg1)));

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
				  convert (TREE_TYPE (arg0),
					   integer_one_node)),
			   arg1));
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 0),
					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
				  convert (TREE_TYPE (arg0),
					   integer_one_node)),
			   arg1));
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
	  tree newmod = build (TREE_CODE (arg0), newtype,
			       convert (newtype, TREE_OPERAND (arg0, 0)),
			       convert (newtype, TREE_OPERAND (arg0, 1)));

	  return build (code, type, newmod, convert (newtype, arg1));
	}

      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, integer_zero_node));
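      /* For example, (x & 8) == 8 becomes (x & 8) != 0: a single-bit
	 mask can only yield zero or the bit itself.  */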
      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
	 2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree dandnotc
	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
			   arg1, build1 (BIT_NOT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 1))));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree candnotd
	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
			   TREE_OPERAND (arg0, 1),
			   build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TREE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			     TREE_OPERAND (arg1, 1)),
		      convert (TREE_TYPE (arg0), integer_zero_node));
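      /* For unsigned x, x < (1 << y) holds exactly when every bit of x at
	 position y or above is clear, i.e. when (x >> y) == 0.  */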
7560 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
7561 && TREE_UNSIGNED (TREE_TYPE (arg0
))
7562 && (TREE_CODE (arg1
) == NOP_EXPR
7563 || TREE_CODE (arg1
) == CONVERT_EXPR
)
7564 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
7565 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
7567 build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
7568 convert (TREE_TYPE (arg0
),
7569 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
7570 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1))),
7571 convert (TREE_TYPE (arg0
), integer_zero_node
));
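      /* For unsigned X, "x < (1 << y)" says that no bit of X at position Y
         or above is set, which is exactly "(x >> y) == 0"; the same argument
         turns "x >= (1 << y)" into "(x >> y) != 0".  */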
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              break;

            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              return fold (build (EQ_EXPR, type, arg0, arg1));

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer
                 or we don't honor IEEE floating point NaNs.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              abort ();
            }
        }
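      /* So "x <= x" folds to 1 for integral X, but for IEEE floating point
         it can only be weakened to "x == x", and "x != x" must be left
         alone when NaNs are honored, since a NaN compares unordered to
         itself.  */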
      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, minval),
                               arg1));
              tree equal_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, maxval),
                               arg1));
              tree low_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, minval, cval2, maxval),
                               arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  t = build (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (t);
                  else
                    return fold (t);
                }
            }
        }
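      /* As a worked example, take "((x > y) - (y > x)) > 0": substituting
         (max, min) for (x, y) gives 1, (max, max) gives 0, and (min, max)
         gives 0, so the mask is 4 and the whole expression folds to
         "x > y".  */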
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && (*lang_hooks.can_use_bit_fields_p) ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold (build (code, type, real0, real1)),
                              fold (build (code, type, imag0, imag1))));
        }
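      /* That is, "a == b" on complex operands becomes "real0 == real1 &&
         imag0 == imag1", and "!=" becomes the analogous "||" form, so the
         imaginary parts need not be evaluated when the real parts already
         decide the result.  */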
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold (build (code, type,
                                build1 (INDIRECT_REF, char_type_node,
                                        TREE_VALUE (arglist)),
                                integer_zero_node));
        }
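      /* For example, "strlen (s) == 0" becomes "*s == 0", replacing a call
         that scans the whole string with a single character load.  */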
      /* From here on, the only cases we handle are when the result is
         known to be a constant.

         To compute GT, swap the arguments and do LT.
         To compute GE, do LT and invert the result.
         To compute LE, swap the arguments, do LT and invert the result.
         To compute NE, do EQ and invert the result.

         Therefore, the code below must handle only EQ and LT.  */

      if (code == LE_EXPR || code == GT_EXPR)
        {
          tem = arg0, arg0 = arg1, arg1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Note that it is safe to invert for real values here because we
         will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
        {
          invert = 1;
          code = invert_tree_comparison (code);
        }

      /* Compute a result for LT or EQ if args permit;
         otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        {
          if (code == EQ_EXPR)
            t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
          else
            t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
                               ? INT_CST_LT_UNSIGNED (arg0, arg1)
                               : INT_CST_LT (arg0, arg1)),
                              0);
        }

#if 0 /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
         since such code would be undefined anyway.
         Exception: on sysvr4, using #pragma weak,
         a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && !integer_zerop (arg1)
               && TREE_CONSTANT (arg0)
               && TREE_CODE (arg0) == ADDR_EXPR
               && code == EQ_EXPR)
        t1 = build_int_2 (0, 0);
#endif
      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          /* If either operand is a NaN, the result is false with two
             exceptions: First, an NE_EXPR is true on NaNs, but that case
             is already handled correctly since we will be inverting the
             result for NE_EXPR.  Second, if we had inverted a LE_EXPR
             or a GE_EXPR into a LT_EXPR, we must return true so that it
             will be inverted into false.  */

          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
              || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            t1 = build_int_2 (invert && code == LT_EXPR, 0);

          else if (code == EQ_EXPR)
            t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
                                                 TREE_REAL_CST (arg1)),
                              0);
          else
            t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
                                                TREE_REAL_CST (arg1)),
                              0);
        }

      if (t1 == NULL_TREE)
        return t;

      if (invert)
        TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return (*lang_hooks.truthvalue_conversion) (t1);
      return t1;
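      /* For example, to fold "5 >= 3": GE is inverted to LT, INT_CST_LT
         (5, 3) yields 0, and the final inversion produces 1.  */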
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.  */
          if (! VOID_TYPE_P (TREE_TYPE (tem))
              || VOID_TYPE_P (TREE_TYPE (t)))
            return pedantic_non_lvalue (tem);
          return t;
        }

      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);
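      /* The first test folds away a constant condition entirely; the second
         folds "c ? x : x" to X, with pedantic_omit_one_operand keeping any
         side effects of C.  */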
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          STRIP_NOPS (arg2);

          /* If we have A op 0 ? A : -A, consider applying the following
             transformations:

             A == 0? A : -A    same as -A
             A != 0? A : -A    same as A
             A >= 0? A : -A    same as abs (A)
             A > 0?  A : -A    same as abs (A)
             A <= 0? A : -A    same as -abs (A)
             A < 0?  A : -A    same as -abs (A)

             None of these transformations work for modes with signed
             zeros.  If A is +/-0, the first two transformations will
             change the sign of the result (from +0 to -0, or vice
             versa).  The last four will fix the sign of the result,
             even though the original expressions could be positive or
             negative, depending on the sign of A.

             Note that all these transformations are correct if A is
             NaN, since the two alternatives (A and -A) are also NaNs.  */
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                return
                  pedantic_non_lvalue
                    (convert (type,
                              negate_expr
                              (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
                                        arg1))));
              case NE_EXPR:
                return pedantic_non_lvalue (convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert ((*lang_hooks.types.signed_type)
                                  (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (convert (type, fold (build1 (ABS_EXPR,
                                                TREE_TYPE (arg1), arg1))));
              case LE_EXPR:
              case LT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert ((lang_hooks.types.signed_type)
                                  (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (negate_expr (convert (type,
                                         fold (build1 (ABS_EXPR,
                                                       TREE_TYPE (arg1),
                                                       arg1)))));
              default:
                abort ();
              }

          /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
             A == 0 ? A : 0 is always 0 unless A is -0.  Note that
             both transformations are correct when A is NaN: A != 0
             is then true, and A == 0 is false.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (convert (type, integer_zero_node));
            }
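          /* Both of the preceding groups rely on the !HONOR_SIGNED_ZEROS
             guard above: with A = -0.0, "A == 0 ? A : -A" yields -0.0,
             while its suggested replacement -A would yield +0.0.  */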
          /* Try some transformations of A op B ? A : B.

             A == B? A : B    same as B
             A != B? A : B    same as A
             A >= B? A : B    same as max (A, B)
             A > B?  A : B    same as max (B, A)
             A <= B? A : B    same as min (A, B)
             A < B?  A : B    same as min (B, A)

             As above, these transformations don't work in the presence
             of signed zeros.  For example, if A and B are zeros of
             opposite sign, the first two transformations will change
             the sign of the result.  In the last four, the original
             expressions give different results for (A=+0, B=-0) and
             (A=-0, B=+0), but the transformed expressions do not.

             The first two transformations are correct if either A or B
             is a NaN.  In the first transformation, the condition will
             be false, and B will indeed be chosen.  In the case of the
             second transformation, the condition A != B will be true,
             and A will be chosen.

             The conversions to max() and min() are not correct if B is
             a number and A is not.  The conditions in the original
             expressions will be false, so all four give B.  The min()
             and max() versions would give a NaN instead.  */
          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
              if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
                {
                  comp_type = type;
                  comp_op0 = arg1;
                  comp_op1 = arg2;
                }

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  /* In C++ a ?: expression can be an lvalue, so put the
                     operand which will be used if they are equal first
                     so that we can convert this back to the
                     corresponding COND_EXPR.  */
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue
                      (convert (type, fold (build (MIN_EXPR, comp_type,
                                                   (comp_code == LE_EXPR
                                                    ? comp_op0 : comp_op1),
                                                   (comp_code == LE_EXPR
                                                    ? comp_op1 : comp_op0)))));
                  break;
                case GE_EXPR:
                case GT_EXPR:
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue
                      (convert (type, fold (build (MAX_EXPR, comp_type,
                                                   (comp_code == GE_EXPR
                                                    ? comp_op0 : comp_op1),
                                                   (comp_code == GE_EXPR
                                                    ? comp_op1 : comp_op0)))));
                  break;
                default:
                  abort ();
                }
            }
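          /* For example, "a < b ? a : b" becomes MIN_EXPR (b, a); B is
             placed first because it is the operand selected when the
             comparison is false, i.e. when A == B.  */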
          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = convert (type, TREE_OPERAND (arg0, 1));
                return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
                                    TREE_OPERAND (t, 2)));

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;
              case NE_EXPR:
                break;
              default:
                abort ();
              }
        }
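      /* For example, "x < 5 ? x : 4" has C1 == C2 + 1 and so folds to
         min (x, 4); the TYPE_MAX_VALUE check guards the C2 + 1 computation
         against wrapping.  */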
      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
                                TREE_OPERAND (t, 2), false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build (code, type, tem,
                                TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
          && integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (convert (type,
                                             invert_truthvalue (arg0)));

      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
         operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
                                                 arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
                                                     tem, arg1)));
        }

      return t;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      if (integer_zerop (arg1))
        return build1 (NOP_EXPR, type, arg1);
      return convert (type, arg1);
    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR,
                                          type, TREE_OPERAND (arg0, 1)))));
      return t;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
         appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
        return TREE_OPERAND (t, 0);

      {
        enum tree_code code0 = TREE_CODE (arg0);
        int kind0 = TREE_CODE_CLASS (code0);
        tree arg00 = TREE_OPERAND (arg0, 0);
        tree arg01;

        if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
          return fold (build1 (code0, type,
                               fold (build1 (CLEANUP_POINT_EXPR,
                                             TREE_TYPE (arg00), arg00))));

        if (kind0 == '<' || kind0 == '2'
            || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
            || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
            || code0 == TRUTH_XOR_EXPR)
          {
            arg01 = TREE_OPERAND (arg0, 1);

            if (TREE_CONSTANT (arg00)
                || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
                    && ! has_cleanups (arg00)))
              return fold (build (code0, type, arg00,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg01), arg01))));

            if (TREE_CONSTANT (arg01))
              return fold (build (code0, type,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg00), arg00)),
                                  arg01));
          }

        return t;
      }
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
        {
          tree tmp = fold_builtin (expr);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
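/* If any subroutine reached from fold_1 modified EXPR in place, the
   before and after digests computed above will differ and
   fold_check_failed reports an internal compiler error.  */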
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case 'x':
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case 'e':
      switch (code)
        {
        case SAVE_EXPR: len = 2; break;
        case GOTO_SUBROUTINE_EXPR: len = 0; break;
        case RTL_EXPR: len = 0; break;
        case WITH_CLEANUP_EXPR: len = 2; break;
        default: break;
        }
      /* Fall through.  */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
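/* For example, an initializer like "static double d = 0.0 / 0.0;" may be
   folded here even under -ftrapping-math, since a static initializer is
   evaluated at translation time rather than at run time.  */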
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = convert (type,
                                     const_binop (LSHIFT_EXPR, size_one_node,
                                                  op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
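/* For example, multiple_of_p succeeds for TOP = "j * 8" and BOTTOM = 8 via
   the MULT_EXPR case, since one factor is itself a multiple of 8, and for
   TOP = "j * 8 + 16" via the PLUS_EXPR case, since both addends are.  */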
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TREE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TREE_UNSIGNED (inner_type);
          }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_CABS:
            case BUILT_IN_CABSL:
            case BUILT_IN_CABSF:
            case BUILT_IN_EXP:
            case BUILT_IN_EXPF:
            case BUILT_IN_EXPL:
            case BUILT_IN_EXP2:
            case BUILT_IN_EXP2F:
            case BUILT_IN_EXP2L:
            case BUILT_IN_EXP10:
            case BUILT_IN_EXP10F:
            case BUILT_IN_EXP10L:
            case BUILT_IN_FABS:
            case BUILT_IN_FABSF:
            case BUILT_IN_FABSL:
            case BUILT_IN_FFS:
            case BUILT_IN_FFSL:
            case BUILT_IN_FFSLL:
            case BUILT_IN_PARITY:
            case BUILT_IN_PARITYL:
            case BUILT_IN_PARITYLL:
            case BUILT_IN_POPCOUNT:
            case BUILT_IN_POPCOUNTL:
            case BUILT_IN_POPCOUNTLL:
            case BUILT_IN_POW10:
            case BUILT_IN_POW10F:
            case BUILT_IN_POW10L:
            case BUILT_IN_SQRT:
            case BUILT_IN_SQRTF:
            case BUILT_IN_SQRTL:
              return 1;

            case BUILT_IN_ATAN:
            case BUILT_IN_ATANF:
            case BUILT_IN_ATANL:
            case BUILT_IN_CEIL:
            case BUILT_IN_CEILF:
            case BUILT_IN_CEILL:
            case BUILT_IN_FLOOR:
            case BUILT_IN_FLOORF:
            case BUILT_IN_FLOORL:
            case BUILT_IN_NEARBYINT:
            case BUILT_IN_NEARBYINTF:
            case BUILT_IN_NEARBYINTL:
            case BUILT_IN_ROUND:
            case BUILT_IN_ROUNDF:
            case BUILT_IN_ROUNDL:
            case BUILT_IN_TRUNC:
            case BUILT_IN_TRUNCF:
            case BUILT_IN_TRUNCL:
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            case BUILT_IN_POW:
            case BUILT_IN_POWF:
            case BUILT_IN_POWL:
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
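/* As a worked example of the zero_extend rules above: in
   "(int) (unsigned char) x + (int) (unsigned char) y" each operand is at
   most 255, so the sum is at most 510, and MAX (8, 8) + 1 == 9 is well
   below the 32-bit precision of the result, so the sum cannot become
   negative.  */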
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"