1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
53 /* Handle floating overflow for `const_binop'. */
54 static jmp_buf float_error
;
56 static void encode
PROTO((HOST_WIDE_INT
*,
57 HOST_WIDE_INT
, HOST_WIDE_INT
));
58 static void decode
PROTO((HOST_WIDE_INT
*,
59 HOST_WIDE_INT
*, HOST_WIDE_INT
*));
60 int div_and_round_double
PROTO((enum tree_code
, int, HOST_WIDE_INT
,
61 HOST_WIDE_INT
, HOST_WIDE_INT
,
62 HOST_WIDE_INT
, HOST_WIDE_INT
*,
63 HOST_WIDE_INT
*, HOST_WIDE_INT
*,
65 static int split_tree
PROTO((tree
, enum tree_code
, tree
*,
67 static tree int_const_binop
PROTO((enum tree_code
, tree
, tree
, int, int));
68 static tree const_binop
PROTO((enum tree_code
, tree
, tree
, int));
69 static tree fold_convert
PROTO((tree
, tree
));
70 static enum tree_code invert_tree_comparison
PROTO((enum tree_code
));
71 static enum tree_code swap_tree_comparison
PROTO((enum tree_code
));
72 static int truth_value_p
PROTO((enum tree_code
));
73 static int operand_equal_for_comparison_p
PROTO((tree
, tree
, tree
));
74 static int twoval_comparison_p
PROTO((tree
, tree
*, tree
*, int *));
75 static tree eval_subst
PROTO((tree
, tree
, tree
, tree
, tree
));
76 static tree omit_one_operand
PROTO((tree
, tree
, tree
));
77 static tree pedantic_omit_one_operand
PROTO((tree
, tree
, tree
));
78 static tree distribute_bit_expr
PROTO((enum tree_code
, tree
, tree
, tree
));
79 static tree make_bit_field_ref
PROTO((tree
, tree
, int, int, int));
80 static tree optimize_bit_field_compare
PROTO((enum tree_code
, tree
,
82 static tree decode_field_reference
PROTO((tree
, int *, int *,
83 enum machine_mode
*, int *,
84 int *, tree
*, tree
*));
85 static int all_ones_mask_p
PROTO((tree
, int));
86 static int simple_operand_p
PROTO((tree
));
87 static tree range_binop
PROTO((enum tree_code
, tree
, tree
, int,
89 static tree make_range
PROTO((tree
, int *, tree
*, tree
*));
90 static tree build_range_check
PROTO((tree
, tree
, int, tree
, tree
));
91 static int merge_ranges
PROTO((int *, tree
*, tree
*, int, tree
, tree
,
93 static tree fold_range_test
PROTO((tree
));
94 static tree unextend
PROTO((tree
, int, int, tree
));
95 static tree fold_truthop
PROTO((enum tree_code
, tree
, tree
, tree
));
96 static tree strip_compound_expr
PROTO((tree
, tree
));
97 static int multiple_of_p
PROTO((tree
, tree
, tree
));
98 static tree constant_boolean_node
PROTO((int, tree
));
101 #define BRANCH_COST 1
104 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
105 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
106 Then this yields nonzero if overflow occurred during the addition.
107 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
108 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
109 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
111 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
112 We do that by representing the two-word integer in 4 words, with only
113 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
116 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
117 #define HIGHPART(x) \
118 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
119 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
121 /* Unpack a two-word integer into 4 words.
122 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
123 WORDS points to the array of HOST_WIDE_INTs. */
126 encode (words
, low
, hi
)
127 HOST_WIDE_INT
*words
;
128 HOST_WIDE_INT low
, hi
;
130 words
[0] = LOWPART (low
);
131 words
[1] = HIGHPART (low
);
132 words
[2] = LOWPART (hi
);
133 words
[3] = HIGHPART (hi
);
136 /* Pack an array of 4 words into a two-word integer.
137 WORDS points to the array of words.
138 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
141 decode (words
, low
, hi
)
142 HOST_WIDE_INT
*words
;
143 HOST_WIDE_INT
*low
, *hi
;
145 *low
= words
[0] | words
[1] * BASE
;
146 *hi
= words
[2] | words
[3] * BASE
;
149 /* Make the integer constant T valid for its type
150 by setting to 0 or 1 all the bits in the constant
151 that don't belong in the type.
152 Yield 1 if a signed overflow occurs, 0 otherwise.
153 If OVERFLOW is nonzero, a signed overflow has already occurred
154 in calculating T, so propagate it.
156 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
160 force_fit_type (t
, overflow
)
164 HOST_WIDE_INT low
, high
;
167 if (TREE_CODE (t
) == REAL_CST
)
169 #ifdef CHECK_FLOAT_VALUE
170 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t
)), TREE_REAL_CST (t
),
176 else if (TREE_CODE (t
) != INTEGER_CST
)
179 low
= TREE_INT_CST_LOW (t
);
180 high
= TREE_INT_CST_HIGH (t
);
182 if (POINTER_TYPE_P (TREE_TYPE (t
)))
185 prec
= TYPE_PRECISION (TREE_TYPE (t
));
187 /* First clear all bits that are beyond the type's precision. */
189 if (prec
== 2 * HOST_BITS_PER_WIDE_INT
)
191 else if (prec
> HOST_BITS_PER_WIDE_INT
)
193 TREE_INT_CST_HIGH (t
)
194 &= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
198 TREE_INT_CST_HIGH (t
) = 0;
199 if (prec
< HOST_BITS_PER_WIDE_INT
)
200 TREE_INT_CST_LOW (t
) &= ~((HOST_WIDE_INT
) (-1) << prec
);
203 /* Unsigned types do not suffer sign extension or overflow. */
204 if (TREE_UNSIGNED (TREE_TYPE (t
)))
207 /* If the value's sign bit is set, extend the sign. */
208 if (prec
!= 2 * HOST_BITS_PER_WIDE_INT
209 && (prec
> HOST_BITS_PER_WIDE_INT
210 ? (TREE_INT_CST_HIGH (t
)
211 & ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)))
212 : TREE_INT_CST_LOW (t
) & ((HOST_WIDE_INT
) 1 << (prec
- 1))))
214 /* Value is negative:
215 set to 1 all the bits that are outside this type's precision. */
216 if (prec
> HOST_BITS_PER_WIDE_INT
)
218 TREE_INT_CST_HIGH (t
)
219 |= ((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
223 TREE_INT_CST_HIGH (t
) = -1;
224 if (prec
< HOST_BITS_PER_WIDE_INT
)
225 TREE_INT_CST_LOW (t
) |= ((HOST_WIDE_INT
) (-1) << prec
);
229 /* Yield nonzero if signed overflow occurred. */
231 ((overflow
| (low
^ TREE_INT_CST_LOW (t
)) | (high
^ TREE_INT_CST_HIGH (t
)))
235 /* Add two doubleword integers with doubleword result.
236 Each argument is given as two `HOST_WIDE_INT' pieces.
237 One argument is L1 and H1; the other, L2 and H2.
238 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
241 add_double (l1
, h1
, l2
, h2
, lv
, hv
)
242 HOST_WIDE_INT l1
, h1
, l2
, h2
;
243 HOST_WIDE_INT
*lv
, *hv
;
248 h
= h1
+ h2
+ ((unsigned HOST_WIDE_INT
) l
< l1
);
252 return overflow_sum_sign (h1
, h2
, h
);
255 /* Negate a doubleword integer with doubleword result.
256 Return nonzero if the operation overflows, assuming it's signed.
257 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
258 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
261 neg_double (l1
, h1
, lv
, hv
)
262 HOST_WIDE_INT l1
, h1
;
263 HOST_WIDE_INT
*lv
, *hv
;
269 return (*hv
& h1
) < 0;
279 /* Multiply two doubleword integers with doubleword result.
280 Return nonzero if the operation overflows, assuming it's signed.
281 Each argument is given as two `HOST_WIDE_INT' pieces.
282 One argument is L1 and H1; the other, L2 and H2.
283 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
286 mul_double (l1
, h1
, l2
, h2
, lv
, hv
)
287 HOST_WIDE_INT l1
, h1
, l2
, h2
;
288 HOST_WIDE_INT
*lv
, *hv
;
290 HOST_WIDE_INT arg1
[4];
291 HOST_WIDE_INT arg2
[4];
292 HOST_WIDE_INT prod
[4 * 2];
293 register unsigned HOST_WIDE_INT carry
;
294 register int i
, j
, k
;
295 HOST_WIDE_INT toplow
, tophigh
, neglow
, neghigh
;
297 encode (arg1
, l1
, h1
);
298 encode (arg2
, l2
, h2
);
300 bzero ((char *) prod
, sizeof prod
);
302 for (i
= 0; i
< 4; i
++)
305 for (j
= 0; j
< 4; j
++)
308 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
309 carry
+= arg1
[i
] * arg2
[j
];
310 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
312 prod
[k
] = LOWPART (carry
);
313 carry
= HIGHPART (carry
);
318 decode (prod
, lv
, hv
); /* This ignores prod[4] through prod[4*2-1] */
320 /* Check for overflow by calculating the top half of the answer in full;
321 it should agree with the low half's sign bit. */
322 decode (prod
+4, &toplow
, &tophigh
);
325 neg_double (l2
, h2
, &neglow
, &neghigh
);
326 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
330 neg_double (l1
, h1
, &neglow
, &neghigh
);
331 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
333 return (*hv
< 0 ? ~(toplow
& tophigh
) : toplow
| tophigh
) != 0;
336 /* Shift the doubleword integer in L1, H1 left by COUNT places
337 keeping only PREC bits of result.
338 Shift right if COUNT is negative.
339 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
340 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
343 lshift_double (l1
, h1
, count
, prec
, lv
, hv
, arith
)
344 HOST_WIDE_INT l1
, h1
, count
;
346 HOST_WIDE_INT
*lv
, *hv
;
351 rshift_double (l1
, h1
, - count
, prec
, lv
, hv
, arith
);
355 #ifdef SHIFT_COUNT_TRUNCATED
356 if (SHIFT_COUNT_TRUNCATED
)
360 if (count
>= HOST_BITS_PER_WIDE_INT
)
362 *hv
= (unsigned HOST_WIDE_INT
) l1
<< (count
- HOST_BITS_PER_WIDE_INT
);
367 *hv
= (((unsigned HOST_WIDE_INT
) h1
<< count
)
368 | ((unsigned HOST_WIDE_INT
) l1
>> (HOST_BITS_PER_WIDE_INT
- count
- 1) >> 1));
369 *lv
= (unsigned HOST_WIDE_INT
) l1
<< count
;
373 /* Shift the doubleword integer in L1, H1 right by COUNT places
374 keeping only PREC bits of result. COUNT must be positive.
375 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
376 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
379 rshift_double (l1
, h1
, count
, prec
, lv
, hv
, arith
)
380 HOST_WIDE_INT l1
, h1
, count
;
382 HOST_WIDE_INT
*lv
, *hv
;
385 unsigned HOST_WIDE_INT signmask
;
387 ? -((unsigned HOST_WIDE_INT
) h1
>> (HOST_BITS_PER_WIDE_INT
- 1))
390 #ifdef SHIFT_COUNT_TRUNCATED
391 if (SHIFT_COUNT_TRUNCATED
)
395 if (count
>= HOST_BITS_PER_WIDE_INT
)
398 *lv
= ((signmask
<< (2 * HOST_BITS_PER_WIDE_INT
- count
- 1) << 1)
399 | ((unsigned HOST_WIDE_INT
) h1
>> (count
- HOST_BITS_PER_WIDE_INT
)));
403 *lv
= (((unsigned HOST_WIDE_INT
) l1
>> count
)
404 | ((unsigned HOST_WIDE_INT
) h1
<< (HOST_BITS_PER_WIDE_INT
- count
- 1) << 1));
405 *hv
= ((signmask
<< (HOST_BITS_PER_WIDE_INT
- count
))
406 | ((unsigned HOST_WIDE_INT
) h1
>> count
));
410 /* Rotate the doubleword integer in L1, H1 left by COUNT places
411 keeping only PREC bits of result.
412 Rotate right if COUNT is negative.
413 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
416 lrotate_double (l1
, h1
, count
, prec
, lv
, hv
)
417 HOST_WIDE_INT l1
, h1
, count
;
419 HOST_WIDE_INT
*lv
, *hv
;
421 HOST_WIDE_INT s1l
, s1h
, s2l
, s2h
;
427 lshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
428 rshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
433 /* Rotate the doubleword integer in L1, H1 left by COUNT places
434 keeping only PREC bits of result. COUNT must be positive.
435 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
438 rrotate_double (l1
, h1
, count
, prec
, lv
, hv
)
439 HOST_WIDE_INT l1
, h1
, count
;
441 HOST_WIDE_INT
*lv
, *hv
;
443 HOST_WIDE_INT s1l
, s1h
, s2l
, s2h
;
449 rshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
450 lshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
455 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
456 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
457 CODE is a tree code for a kind of division, one of
458 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
460 It controls how the quotient is rounded to a integer.
461 Return nonzero if the operation overflows.
462 UNS nonzero says do unsigned division. */
465 div_and_round_double (code
, uns
,
466 lnum_orig
, hnum_orig
, lden_orig
, hden_orig
,
467 lquo
, hquo
, lrem
, hrem
)
470 HOST_WIDE_INT lnum_orig
, hnum_orig
; /* num == numerator == dividend */
471 HOST_WIDE_INT lden_orig
, hden_orig
; /* den == denominator == divisor */
472 HOST_WIDE_INT
*lquo
, *hquo
, *lrem
, *hrem
;
475 HOST_WIDE_INT num
[4 + 1]; /* extra element for scaling. */
476 HOST_WIDE_INT den
[4], quo
[4];
478 unsigned HOST_WIDE_INT work
;
479 register unsigned HOST_WIDE_INT carry
= 0;
480 HOST_WIDE_INT lnum
= lnum_orig
;
481 HOST_WIDE_INT hnum
= hnum_orig
;
482 HOST_WIDE_INT lden
= lden_orig
;
483 HOST_WIDE_INT hden
= hden_orig
;
486 if ((hden
== 0) && (lden
== 0))
487 overflow
= 1, lden
= 1;
489 /* calculate quotient sign and convert operands to unsigned. */
495 /* (minimum integer) / (-1) is the only overflow case. */
496 if (neg_double (lnum
, hnum
, &lnum
, &hnum
) && (lden
& hden
) == -1)
502 neg_double (lden
, hden
, &lden
, &hden
);
506 if (hnum
== 0 && hden
== 0)
507 { /* single precision */
509 /* This unsigned division rounds toward zero. */
510 *lquo
= lnum
/ (unsigned HOST_WIDE_INT
) lden
;
515 { /* trivial case: dividend < divisor */
516 /* hden != 0 already checked. */
523 bzero ((char *) quo
, sizeof quo
);
525 bzero ((char *) num
, sizeof num
); /* to zero 9th element */
526 bzero ((char *) den
, sizeof den
);
528 encode (num
, lnum
, hnum
);
529 encode (den
, lden
, hden
);
531 /* Special code for when the divisor < BASE. */
532 if (hden
== 0 && lden
< (HOST_WIDE_INT
) BASE
)
534 /* hnum != 0 already checked. */
535 for (i
= 4 - 1; i
>= 0; i
--)
537 work
= num
[i
] + carry
* BASE
;
538 quo
[i
] = work
/ (unsigned HOST_WIDE_INT
) lden
;
539 carry
= work
% (unsigned HOST_WIDE_INT
) lden
;
544 /* Full double precision division,
545 with thanks to Don Knuth's "Seminumerical Algorithms". */
546 int num_hi_sig
, den_hi_sig
;
547 unsigned HOST_WIDE_INT quo_est
, scale
;
549 /* Find the highest non-zero divisor digit. */
550 for (i
= 4 - 1; ; i
--)
556 /* Insure that the first digit of the divisor is at least BASE/2.
557 This is required by the quotient digit estimation algorithm. */
559 scale
= BASE
/ (den
[den_hi_sig
] + 1);
560 if (scale
> 1) { /* scale divisor and dividend */
562 for (i
= 0; i
<= 4 - 1; i
++) {
563 work
= (num
[i
] * scale
) + carry
;
564 num
[i
] = LOWPART (work
);
565 carry
= HIGHPART (work
);
568 for (i
= 0; i
<= 4 - 1; i
++) {
569 work
= (den
[i
] * scale
) + carry
;
570 den
[i
] = LOWPART (work
);
571 carry
= HIGHPART (work
);
572 if (den
[i
] != 0) den_hi_sig
= i
;
579 for (i
= num_hi_sig
- den_hi_sig
- 1; i
>= 0; i
--) {
580 /* guess the next quotient digit, quo_est, by dividing the first
581 two remaining dividend digits by the high order quotient digit.
582 quo_est is never low and is at most 2 high. */
583 unsigned HOST_WIDE_INT tmp
;
585 num_hi_sig
= i
+ den_hi_sig
+ 1;
586 work
= num
[num_hi_sig
] * BASE
+ num
[num_hi_sig
- 1];
587 if (num
[num_hi_sig
] != den
[den_hi_sig
])
588 quo_est
= work
/ den
[den_hi_sig
];
592 /* refine quo_est so it's usually correct, and at most one high. */
593 tmp
= work
- quo_est
* den
[den_hi_sig
];
595 && den
[den_hi_sig
- 1] * quo_est
> (tmp
* BASE
+ num
[num_hi_sig
- 2]))
598 /* Try QUO_EST as the quotient digit, by multiplying the
599 divisor by QUO_EST and subtracting from the remaining dividend.
600 Keep in mind that QUO_EST is the I - 1st digit. */
603 for (j
= 0; j
<= den_hi_sig
; j
++)
605 work
= quo_est
* den
[j
] + carry
;
606 carry
= HIGHPART (work
);
607 work
= num
[i
+ j
] - LOWPART (work
);
608 num
[i
+ j
] = LOWPART (work
);
609 carry
+= HIGHPART (work
) != 0;
612 /* if quo_est was high by one, then num[i] went negative and
613 we need to correct things. */
615 if (num
[num_hi_sig
] < carry
)
618 carry
= 0; /* add divisor back in */
619 for (j
= 0; j
<= den_hi_sig
; j
++)
621 work
= num
[i
+ j
] + den
[j
] + carry
;
622 carry
= HIGHPART (work
);
623 num
[i
+ j
] = LOWPART (work
);
625 num
[num_hi_sig
] += carry
;
628 /* store the quotient digit. */
633 decode (quo
, lquo
, hquo
);
636 /* if result is negative, make it so. */
638 neg_double (*lquo
, *hquo
, lquo
, hquo
);
640 /* compute trial remainder: rem = num - (quo * den) */
641 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
642 neg_double (*lrem
, *hrem
, lrem
, hrem
);
643 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
648 case TRUNC_MOD_EXPR
: /* round toward zero */
649 case EXACT_DIV_EXPR
: /* for this one, it shouldn't matter */
653 case FLOOR_MOD_EXPR
: /* round toward negative infinity */
654 if (quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio < 0 && rem != 0 */
657 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1,
660 else return overflow
;
664 case CEIL_MOD_EXPR
: /* round toward positive infinity */
665 if (!quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio > 0 && rem != 0 */
667 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
670 else return overflow
;
674 case ROUND_MOD_EXPR
: /* round to closest integer */
676 HOST_WIDE_INT labs_rem
= *lrem
, habs_rem
= *hrem
;
677 HOST_WIDE_INT labs_den
= lden
, habs_den
= hden
, ltwice
, htwice
;
679 /* get absolute values */
680 if (*hrem
< 0) neg_double (*lrem
, *hrem
, &labs_rem
, &habs_rem
);
681 if (hden
< 0) neg_double (lden
, hden
, &labs_den
, &habs_den
);
683 /* if (2 * abs (lrem) >= abs (lden)) */
684 mul_double ((HOST_WIDE_INT
) 2, (HOST_WIDE_INT
) 0,
685 labs_rem
, habs_rem
, <wice
, &htwice
);
686 if (((unsigned HOST_WIDE_INT
) habs_den
687 < (unsigned HOST_WIDE_INT
) htwice
)
688 || (((unsigned HOST_WIDE_INT
) habs_den
689 == (unsigned HOST_WIDE_INT
) htwice
)
690 && ((HOST_WIDE_INT
unsigned) labs_den
691 < (unsigned HOST_WIDE_INT
) ltwice
)))
695 add_double (*lquo
, *hquo
,
696 (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1, lquo
, hquo
);
699 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
702 else return overflow
;
710 /* compute true remainder: rem = num - (quo * den) */
711 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
712 neg_double (*lrem
, *hrem
, lrem
, hrem
);
713 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
717 #ifndef REAL_ARITHMETIC
718 /* Effectively truncate a real value to represent the nearest possible value
719 in a narrower mode. The result is actually represented in the same data
720 type as the argument, but its value is usually different.
722 A trap may occur during the FP operations and it is the responsibility
723 of the calling function to have a handler established. */
726 real_value_truncate (mode
, arg
)
727 enum machine_mode mode
;
730 return REAL_VALUE_TRUNCATE (mode
, arg
);
733 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
735 /* Check for infinity in an IEEE double precision number. */
741 /* The IEEE 64-bit double format. */
746 unsigned exponent
: 11;
747 unsigned mantissa1
: 20;
752 unsigned mantissa1
: 20;
753 unsigned exponent
: 11;
759 if (u
.big_endian
.sign
== 1)
762 return (u
.big_endian
.exponent
== 2047
763 && u
.big_endian
.mantissa1
== 0
764 && u
.big_endian
.mantissa2
== 0);
769 return (u
.little_endian
.exponent
== 2047
770 && u
.little_endian
.mantissa1
== 0
771 && u
.little_endian
.mantissa2
== 0);
775 /* Check whether an IEEE double precision number is a NaN. */
781 /* The IEEE 64-bit double format. */
786 unsigned exponent
: 11;
787 unsigned mantissa1
: 20;
792 unsigned mantissa1
: 20;
793 unsigned exponent
: 11;
799 if (u
.big_endian
.sign
== 1)
802 return (u
.big_endian
.exponent
== 2047
803 && (u
.big_endian
.mantissa1
!= 0
804 || u
.big_endian
.mantissa2
!= 0));
809 return (u
.little_endian
.exponent
== 2047
810 && (u
.little_endian
.mantissa1
!= 0
811 || u
.little_endian
.mantissa2
!= 0));
815 /* Check for a negative IEEE double precision number. */
821 /* The IEEE 64-bit double format. */
826 unsigned exponent
: 11;
827 unsigned mantissa1
: 20;
832 unsigned mantissa1
: 20;
833 unsigned exponent
: 11;
839 if (u
.big_endian
.sign
== 1)
842 return u
.big_endian
.sign
;
847 return u
.little_endian
.sign
;
850 #else /* Target not IEEE */
852 /* Let's assume other float formats don't have infinity.
853 (This can be overridden by redefining REAL_VALUE_ISINF.) */
861 /* Let's assume other float formats don't have NaNs.
862 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
870 /* Let's assume other float formats don't have minus zero.
871 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
878 #endif /* Target not IEEE */
880 /* Try to change R into its exact multiplicative inverse in machine mode
881 MODE. Return nonzero function value if successful. */
884 exact_real_inverse (mode
, r
)
885 enum machine_mode mode
;
895 /* Usually disable if bounds checks are not reliable. */
896 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
) && !flag_pretend_float
)
899 /* Set array index to the less significant bits in the unions, depending
900 on the endian-ness of the host doubles.
901 Disable if insufficient information on the data structure. */
902 #if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
905 #if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
908 #if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
911 #define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
916 if (setjmp (float_error
))
918 /* Don't do the optimization if there was an arithmetic error. */
920 set_float_handler (NULL_PTR
);
923 set_float_handler (float_error
);
925 /* Domain check the argument. */
931 if (REAL_VALUE_ISINF (x
.d
) || REAL_VALUE_ISNAN (x
.d
))
935 /* Compute the reciprocal and check for numerical exactness.
936 It is unnecessary to check all the significand bits to determine
937 whether X is a power of 2. If X is not, then it is impossible for
938 the bottom half significand of both X and 1/X to be all zero bits.
939 Hence we ignore the data structure of the top half and examine only
940 the low order bits of the two significands. */
942 if (x
.i
[K
] != 0 || x
.i
[K
+ 1] != 0 || t
.i
[K
] != 0 || t
.i
[K
+ 1] != 0)
945 /* Truncate to the required mode and range-check the result. */
946 y
.d
= REAL_VALUE_TRUNCATE (mode
, t
.d
);
947 #ifdef CHECK_FLOAT_VALUE
949 if (CHECK_FLOAT_VALUE (mode
, y
.d
, i
))
953 /* Fail if truncation changed the value. */
954 if (y
.d
!= t
.d
|| y
.d
== 0.0)
958 if (REAL_VALUE_ISINF (y
.d
) || REAL_VALUE_ISNAN (y
.d
))
962 /* Output the reciprocal and return success flag. */
963 set_float_handler (NULL_PTR
);
969 /* Convert C9X hexadecimal floating point string constant S. Return
970 real value type in mode MODE. This function uses the host computer's
971 fp arithmetic when there is no REAL_ARITHMETIC. */
974 real_hex_to_f (s
, mode
)
976 enum machine_mode mode
;
980 unsigned HOST_WIDE_INT low
, high
;
981 int frexpon
, expon
, shcount
, nrmcount
, k
;
982 int sign
, expsign
, decpt
, isfloat
, isldouble
, gotp
, lost
;
992 while (*p
== ' ' || *p
== '\t')
995 /* Sign, if any, comes first. */
1003 /* The string is supposed to start with 0x or 0X . */
1007 if (*p
== 'x' || *p
== 'X')
1020 lost
= 0; /* Nonzero low order bits shifted out and discarded. */
1021 frexpon
= 0; /* Bits after the decimal point. */
1022 expon
= 0; /* Value of exponent. */
1023 decpt
= 0; /* How many decimal points. */
1024 gotp
= 0; /* How many P's. */
1026 while ((c
= *p
) != '\0')
1028 if ((c
>= '0' && c
<= '9') || (c
>= 'A' && c
<= 'F')
1029 || (c
>= 'a' && c
<= 'f'))
1039 if ((high
& 0xf0000000) == 0)
1041 high
= (high
<< 4) + ((low
>> 28) & 15);
1042 low
= (low
<< 4) + k
;
1049 /* Record nonzero lost bits. */
1061 else if (c
== 'p' || c
== 'P')
1065 /* Sign of exponent. */
1071 /* Value of exponent.
1072 The exponent field is a decimal integer. */
1075 k
= (*p
++ & 0x7f) - '0';
1076 expon
= 10 * expon
+ k
;
1079 /* F suffix is ambiguous in the significand part
1080 so it must appear after the decimal exponent field. */
1081 if (*p
== 'f' || *p
== 'F')
1088 else if (c
== 'l' || c
== 'L')
1097 /* Abort if last character read was not legitimate. */
1099 if ((c
!= '\0' && c
!= ' ' && c
!= '\n' && c
!= '\r') || (decpt
> 1))
1101 /* There must be either one decimal point or one p. */
1102 if (decpt
== 0 && gotp
== 0)
1105 if ((high
== 0) && (low
== 0))
1118 /* Leave a high guard bit for carry-out. */
1119 if ((high
& 0x80000000) != 0)
1122 low
= (low
>> 1) | (high
<< 31);
1126 if ((high
& 0xffff8000) == 0)
1128 high
= (high
<< 16) + ((low
>> 16) & 0xffff);
1132 while ((high
& 0xc0000000) == 0)
1134 high
= (high
<< 1) + ((low
>> 31) & 1);
1138 if (isfloat
|| GET_MODE_SIZE(mode
) == UNITS_PER_WORD
)
1140 /* Keep 24 bits precision, bits 0x7fffff80.
1141 Rounding bit is 0x40. */
1142 lost
= lost
| low
| (high
& 0x3f);
1146 if ((high
& 0x80) || lost
)
1153 /* We need real.c to do long double formats, so here default
1154 to double precision. */
1155 #if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1157 Keep 53 bits precision, bits 0x7fffffff fffffc00.
1158 Rounding bit is low word 0x200. */
1159 lost
= lost
| (low
& 0x1ff);
1162 if ((low
& 0x400) || lost
)
1164 low
= (low
+ 0x200) & 0xfffffc00;
1171 /* Assume it's a VAX with 56-bit significand,
1172 bits 0x7fffffff ffffff80. */
1173 lost
= lost
| (low
& 0x7f);
1176 if ((low
& 0x80) || lost
)
1178 low
= (low
+ 0x40) & 0xffffff80;
1187 ip
= REAL_VALUE_LDEXP (ip
, 32) + (double) low
;
1188 /* Apply shifts and exponent value as power of 2. */
1189 ip
= REAL_VALUE_LDEXP (ip
, expon
- (nrmcount
+ frexpon
));
1196 #endif /* no REAL_ARITHMETIC */
1198 /* Split a tree IN into a constant and a variable part
1199 that could be combined with CODE to make IN.
1200 CODE must be a commutative arithmetic operation.
1201 Store the constant part into *CONP and the variable in &VARP.
1202 Return 1 if this was done; zero means the tree IN did not decompose
1205 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
1206 Therefore, we must tell the caller whether the variable part
1207 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
1208 The value stored is the coefficient for the variable term.
1209 The constant term we return should always be added;
1210 we negate it if necessary. */
1213 split_tree (in
, code
, varp
, conp
, varsignp
)
1215 enum tree_code code
;
1219 register tree outtype
= TREE_TYPE (in
);
1223 /* Strip any conversions that don't change the machine mode. */
1224 while ((TREE_CODE (in
) == NOP_EXPR
1225 || TREE_CODE (in
) == CONVERT_EXPR
)
1226 && (TYPE_MODE (TREE_TYPE (in
))
1227 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in
, 0)))))
1228 in
= TREE_OPERAND (in
, 0);
1230 if (TREE_CODE (in
) == code
1231 || (! FLOAT_TYPE_P (TREE_TYPE (in
))
1232 /* We can associate addition and subtraction together
1233 (even though the C standard doesn't say so)
1234 for integers because the value is not affected.
1235 For reals, the value might be affected, so we can't. */
1236 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
1237 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
1239 enum tree_code code
= TREE_CODE (TREE_OPERAND (in
, 0));
1240 if (code
== INTEGER_CST
)
1242 *conp
= TREE_OPERAND (in
, 0);
1243 *varp
= TREE_OPERAND (in
, 1);
1244 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
1245 && TREE_TYPE (*varp
) != outtype
)
1246 *varp
= convert (outtype
, *varp
);
1247 *varsignp
= (TREE_CODE (in
) == MINUS_EXPR
) ? -1 : 1;
1250 if (TREE_CONSTANT (TREE_OPERAND (in
, 1)))
1252 *conp
= TREE_OPERAND (in
, 1);
1253 *varp
= TREE_OPERAND (in
, 0);
1255 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
1256 && TREE_TYPE (*varp
) != outtype
)
1257 *varp
= convert (outtype
, *varp
);
1258 if (TREE_CODE (in
) == MINUS_EXPR
)
1260 /* If operation is subtraction and constant is second,
1261 must negate it to get an additive constant.
1262 And this cannot be done unless it is a manifest constant.
1263 It could also be the address of a static variable.
1264 We cannot negate that, so give up. */
1265 if (TREE_CODE (*conp
) == INTEGER_CST
)
1266 /* Subtracting from integer_zero_node loses for long long. */
1267 *conp
= fold (build1 (NEGATE_EXPR
, TREE_TYPE (*conp
), *conp
));
1273 if (TREE_CONSTANT (TREE_OPERAND (in
, 0)))
1275 *conp
= TREE_OPERAND (in
, 0);
1276 *varp
= TREE_OPERAND (in
, 1);
1277 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
1278 && TREE_TYPE (*varp
) != outtype
)
1279 *varp
= convert (outtype
, *varp
);
1280 *varsignp
= (TREE_CODE (in
) == MINUS_EXPR
) ? -1 : 1;
1287 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1288 to produce a new constant.
1290 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1291 If FORSIZE is nonzero, compute overflow for unsigned types. */
1294 int_const_binop (code
, arg1
, arg2
, notrunc
, forsize
)
1295 enum tree_code code
;
1296 register tree arg1
, arg2
;
1297 int notrunc
, forsize
;
1299 HOST_WIDE_INT int1l
, int1h
, int2l
, int2h
;
1300 HOST_WIDE_INT low
, hi
;
1301 HOST_WIDE_INT garbagel
, garbageh
;
1303 int uns
= TREE_UNSIGNED (TREE_TYPE (arg1
));
1305 int no_overflow
= 0;
1307 int1l
= TREE_INT_CST_LOW (arg1
);
1308 int1h
= TREE_INT_CST_HIGH (arg1
);
1309 int2l
= TREE_INT_CST_LOW (arg2
);
1310 int2h
= TREE_INT_CST_HIGH (arg2
);
1315 low
= int1l
| int2l
, hi
= int1h
| int2h
;
1319 low
= int1l
^ int2l
, hi
= int1h
^ int2h
;
1323 low
= int1l
& int2l
, hi
= int1h
& int2h
;
1326 case BIT_ANDTC_EXPR
:
1327 low
= int1l
& ~int2l
, hi
= int1h
& ~int2h
;
1333 /* It's unclear from the C standard whether shifts can overflow.
1334 The following code ignores overflow; perhaps a C standard
1335 interpretation ruling is needed. */
1336 lshift_double (int1l
, int1h
, int2l
,
1337 TYPE_PRECISION (TREE_TYPE (arg1
)),
1346 lrotate_double (int1l
, int1h
, int2l
,
1347 TYPE_PRECISION (TREE_TYPE (arg1
)),
1352 overflow
= add_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1356 neg_double (int2l
, int2h
, &low
, &hi
);
1357 add_double (int1l
, int1h
, low
, hi
, &low
, &hi
);
1358 overflow
= overflow_sum_sign (hi
, int2h
, int1h
);
1362 overflow
= mul_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1365 case TRUNC_DIV_EXPR
:
1366 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1367 case EXACT_DIV_EXPR
:
1368 /* This is a shortcut for a common special case. */
1369 if (int2h
== 0 && int2l
> 0
1370 && ! TREE_CONSTANT_OVERFLOW (arg1
)
1371 && ! TREE_CONSTANT_OVERFLOW (arg2
)
1372 && int1h
== 0 && int1l
>= 0)
1374 if (code
== CEIL_DIV_EXPR
)
1376 low
= int1l
/ int2l
, hi
= 0;
1380 /* ... fall through ... */
1382 case ROUND_DIV_EXPR
:
1383 if (int2h
== 0 && int2l
== 1)
1385 low
= int1l
, hi
= int1h
;
1388 if (int1l
== int2l
&& int1h
== int2h
1389 && ! (int1l
== 0 && int1h
== 0))
1394 overflow
= div_and_round_double (code
, uns
,
1395 int1l
, int1h
, int2l
, int2h
,
1396 &low
, &hi
, &garbagel
, &garbageh
);
1399 case TRUNC_MOD_EXPR
:
1400 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1401 /* This is a shortcut for a common special case. */
1402 if (int2h
== 0 && int2l
> 0
1403 && ! TREE_CONSTANT_OVERFLOW (arg1
)
1404 && ! TREE_CONSTANT_OVERFLOW (arg2
)
1405 && int1h
== 0 && int1l
>= 0)
1407 if (code
== CEIL_MOD_EXPR
)
1409 low
= int1l
% int2l
, hi
= 0;
1413 /* ... fall through ... */
1415 case ROUND_MOD_EXPR
:
1416 overflow
= div_and_round_double (code
, uns
,
1417 int1l
, int1h
, int2l
, int2h
,
1418 &garbagel
, &garbageh
, &low
, &hi
);
1425 low
= (((unsigned HOST_WIDE_INT
) int1h
1426 < (unsigned HOST_WIDE_INT
) int2h
)
1427 || (((unsigned HOST_WIDE_INT
) int1h
1428 == (unsigned HOST_WIDE_INT
) int2h
)
1429 && ((unsigned HOST_WIDE_INT
) int1l
1430 < (unsigned HOST_WIDE_INT
) int2l
)));
1434 low
= ((int1h
< int2h
)
1435 || ((int1h
== int2h
)
1436 && ((unsigned HOST_WIDE_INT
) int1l
1437 < (unsigned HOST_WIDE_INT
) int2l
)));
1439 if (low
== (code
== MIN_EXPR
))
1440 low
= int1l
, hi
= int1h
;
1442 low
= int2l
, hi
= int2h
;
1449 if (TREE_TYPE (arg1
) == sizetype
&& hi
== 0
1451 && (TYPE_MAX_VALUE (sizetype
) == NULL
1452 || low
<= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype
)))
1454 && ! TREE_OVERFLOW (arg1
) && ! TREE_OVERFLOW (arg2
))
1458 t
= build_int_2 (low
, hi
);
1459 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1463 = ((notrunc
? (!uns
|| forsize
) && overflow
1464 : force_fit_type (t
, (!uns
|| forsize
) && overflow
) && ! no_overflow
)
1465 | TREE_OVERFLOW (arg1
)
1466 | TREE_OVERFLOW (arg2
));
1467 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1468 So check if force_fit_type truncated the value. */
1470 && ! TREE_OVERFLOW (t
)
1471 && (TREE_INT_CST_HIGH (t
) != hi
1472 || TREE_INT_CST_LOW (t
) != low
))
1473 TREE_OVERFLOW (t
) = 1;
1474 TREE_CONSTANT_OVERFLOW (t
) = (TREE_OVERFLOW (t
)
1475 | TREE_CONSTANT_OVERFLOW (arg1
)
1476 | TREE_CONSTANT_OVERFLOW (arg2
));
1480 /* Combine two constants ARG1 and ARG2 under operation CODE
1481 to produce a new constant.
1482 We assume ARG1 and ARG2 have the same data type,
1483 or at least are the same kind of constant and the same machine mode.
1485 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1488 const_binop (code
, arg1
, arg2
, notrunc
)
1489 enum tree_code code
;
1490 register tree arg1
, arg2
;
1493 STRIP_NOPS (arg1
); STRIP_NOPS (arg2
);
1495 if (TREE_CODE (arg1
) == INTEGER_CST
)
1496 return int_const_binop (code
, arg1
, arg2
, notrunc
, 0);
1498 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1499 if (TREE_CODE (arg1
) == REAL_CST
)
1504 REAL_VALUE_TYPE value
;
1507 d1
= TREE_REAL_CST (arg1
);
1508 d2
= TREE_REAL_CST (arg2
);
1510 /* If either operand is a NaN, just return it. Otherwise, set up
1511 for floating-point trap; we return an overflow. */
1512 if (REAL_VALUE_ISNAN (d1
))
1514 else if (REAL_VALUE_ISNAN (d2
))
1516 else if (setjmp (float_error
))
1518 t
= copy_node (arg1
);
1523 set_float_handler (float_error
);
1525 #ifdef REAL_ARITHMETIC
1526 REAL_ARITHMETIC (value
, code
, d1
, d2
);
1543 #ifndef REAL_INFINITY
1552 value
= MIN (d1
, d2
);
1556 value
= MAX (d1
, d2
);
1562 #endif /* no REAL_ARITHMETIC */
1563 t
= build_real (TREE_TYPE (arg1
),
1564 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1
)), value
));
1566 set_float_handler (NULL_PTR
);
1569 = (force_fit_type (t
, overflow
)
1570 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1571 TREE_CONSTANT_OVERFLOW (t
)
1573 | TREE_CONSTANT_OVERFLOW (arg1
)
1574 | TREE_CONSTANT_OVERFLOW (arg2
);
1577 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1578 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1580 register tree type
= TREE_TYPE (arg1
);
1581 register tree r1
= TREE_REALPART (arg1
);
1582 register tree i1
= TREE_IMAGPART (arg1
);
1583 register tree r2
= TREE_REALPART (arg2
);
1584 register tree i2
= TREE_IMAGPART (arg2
);
1590 t
= build_complex (type
,
1591 const_binop (PLUS_EXPR
, r1
, r2
, notrunc
),
1592 const_binop (PLUS_EXPR
, i1
, i2
, notrunc
));
1596 t
= build_complex (type
,
1597 const_binop (MINUS_EXPR
, r1
, r2
, notrunc
),
1598 const_binop (MINUS_EXPR
, i1
, i2
, notrunc
));
1602 t
= build_complex (type
,
1603 const_binop (MINUS_EXPR
,
1604 const_binop (MULT_EXPR
,
1606 const_binop (MULT_EXPR
,
1609 const_binop (PLUS_EXPR
,
1610 const_binop (MULT_EXPR
,
1612 const_binop (MULT_EXPR
,
1619 register tree magsquared
1620 = const_binop (PLUS_EXPR
,
1621 const_binop (MULT_EXPR
, r2
, r2
, notrunc
),
1622 const_binop (MULT_EXPR
, i2
, i2
, notrunc
),
1625 t
= build_complex (type
,
1627 (INTEGRAL_TYPE_P (TREE_TYPE (r1
))
1628 ? TRUNC_DIV_EXPR
: RDIV_EXPR
,
1629 const_binop (PLUS_EXPR
,
1630 const_binop (MULT_EXPR
, r1
, r2
,
1632 const_binop (MULT_EXPR
, i1
, i2
,
1635 magsquared
, notrunc
),
1637 (INTEGRAL_TYPE_P (TREE_TYPE (r1
))
1638 ? TRUNC_DIV_EXPR
: RDIV_EXPR
,
1639 const_binop (MINUS_EXPR
,
1640 const_binop (MULT_EXPR
, i1
, r2
,
1642 const_binop (MULT_EXPR
, r1
, i2
,
1645 magsquared
, notrunc
));
1657 /* Return an INTEGER_CST with value V . The type is determined by bit_p:
1658 if it is zero, the type is taken from sizetype; if it is one, the type
1659 is taken from bitsizetype. */
1662 size_int_wide (number
, high
, bit_p
)
1663 unsigned HOST_WIDE_INT number
, high
;
1667 /* Type-size nodes already made for small sizes. */
1668 static tree size_table
[2*HOST_BITS_PER_WIDE_INT
+ 1][2];
1670 if (number
< 2*HOST_BITS_PER_WIDE_INT
+ 1 && ! high
1671 && size_table
[number
][bit_p
] != 0)
1672 return size_table
[number
][bit_p
];
1673 if (number
< 2*HOST_BITS_PER_WIDE_INT
+ 1 && ! high
)
1675 push_obstacks_nochange ();
1676 /* Make this a permanent node. */
1677 end_temporary_allocation ();
1678 t
= build_int_2 (number
, 0);
1679 TREE_TYPE (t
) = bit_p
? bitsizetype
: sizetype
;
1680 size_table
[number
][bit_p
] = t
;
1685 t
= build_int_2 (number
, high
);
1686 TREE_TYPE (t
) = bit_p
? bitsizetype
: sizetype
;
1687 TREE_OVERFLOW (t
) = TREE_CONSTANT_OVERFLOW (t
) = force_fit_type (t
, 0);
1692 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1693 CODE is a tree code. Data type is taken from `sizetype',
1694 If the operands are constant, so is the result. */
1697 size_binop (code
, arg0
, arg1
)
1698 enum tree_code code
;
1701 /* Handle the special case of two integer constants faster. */
1702 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1704 /* And some specific cases even faster than that. */
1705 if (code
== PLUS_EXPR
&& integer_zerop (arg0
))
1707 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
1708 && integer_zerop (arg1
))
1710 else if (code
== MULT_EXPR
&& integer_onep (arg0
))
1713 /* Handle general case of two integer constants. */
1714 return int_const_binop (code
, arg0
, arg1
, 0, 1);
1717 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1718 return error_mark_node
;
1720 return fold (build (code
, sizetype
, arg0
, arg1
));
1723 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1724 CODE is a tree code. Data type is taken from `ssizetype',
1725 If the operands are constant, so is the result. */
1728 ssize_binop (code
, arg0
, arg1
)
1729 enum tree_code code
;
1732 /* Handle the special case of two integer constants faster. */
1733 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1735 /* And some specific cases even faster than that. */
1736 if (code
== PLUS_EXPR
&& integer_zerop (arg0
))
1738 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
1739 && integer_zerop (arg1
))
1741 else if (code
== MULT_EXPR
&& integer_onep (arg0
))
1744 /* Handle general case of two integer constants. We convert
1745 arg0 to ssizetype because int_const_binop uses its type for the
1747 arg0
= convert (ssizetype
, arg0
);
1748 return int_const_binop (code
, arg0
, arg1
, 0, 0);
1751 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1752 return error_mark_node
;
1754 return fold (build (code
, ssizetype
, arg0
, arg1
));
1757 /* Given T, a tree representing type conversion of ARG1, a constant,
1758 return a constant tree representing the result of conversion. */
1761 fold_convert (t
, arg1
)
1765 register tree type
= TREE_TYPE (t
);
1768 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
1770 if (TREE_CODE (arg1
) == INTEGER_CST
)
1772 /* If we would build a constant wider than GCC supports,
1773 leave the conversion unfolded. */
1774 if (TYPE_PRECISION (type
) > 2 * HOST_BITS_PER_WIDE_INT
)
1777 /* Given an integer constant, make new constant with new type,
1778 appropriately sign-extended or truncated. */
1779 t
= build_int_2 (TREE_INT_CST_LOW (arg1
),
1780 TREE_INT_CST_HIGH (arg1
));
1781 TREE_TYPE (t
) = type
;
1782 /* Indicate an overflow if (1) ARG1 already overflowed,
1783 or (2) force_fit_type indicates an overflow.
1784 Tell force_fit_type that an overflow has already occurred
1785 if ARG1 is a too-large unsigned value and T is signed.
1786 But don't indicate an overflow if converting a pointer. */
1788 = ((force_fit_type (t
,
1789 (TREE_INT_CST_HIGH (arg1
) < 0
1790 && (TREE_UNSIGNED (type
)
1791 < TREE_UNSIGNED (TREE_TYPE (arg1
)))))
1792 && ! POINTER_TYPE_P (TREE_TYPE (arg1
)))
1793 || TREE_OVERFLOW (arg1
));
1794 TREE_CONSTANT_OVERFLOW (t
)
1795 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1797 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1798 else if (TREE_CODE (arg1
) == REAL_CST
)
1800 /* Don't initialize these, use assignments.
1801 Initialized local aggregates don't work on old compilers. */
1805 tree type1
= TREE_TYPE (arg1
);
1808 x
= TREE_REAL_CST (arg1
);
1809 l
= real_value_from_int_cst (type1
, TYPE_MIN_VALUE (type
));
1811 no_upper_bound
= (TYPE_MAX_VALUE (type
) == NULL
);
1812 if (!no_upper_bound
)
1813 u
= real_value_from_int_cst (type1
, TYPE_MAX_VALUE (type
));
1815 /* See if X will be in range after truncation towards 0.
1816 To compensate for truncation, move the bounds away from 0,
1817 but reject if X exactly equals the adjusted bounds. */
1818 #ifdef REAL_ARITHMETIC
1819 REAL_ARITHMETIC (l
, MINUS_EXPR
, l
, dconst1
);
1820 if (!no_upper_bound
)
1821 REAL_ARITHMETIC (u
, PLUS_EXPR
, u
, dconst1
);
1824 if (!no_upper_bound
)
1827 /* If X is a NaN, use zero instead and show we have an overflow.
1828 Otherwise, range check. */
1829 if (REAL_VALUE_ISNAN (x
))
1830 overflow
= 1, x
= dconst0
;
1831 else if (! (REAL_VALUES_LESS (l
, x
)
1833 && REAL_VALUES_LESS (x
, u
)))
1836 #ifndef REAL_ARITHMETIC
1838 HOST_WIDE_INT low
, high
;
1839 HOST_WIDE_INT half_word
1840 = (HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
/ 2);
1845 high
= (HOST_WIDE_INT
) (x
/ half_word
/ half_word
);
1846 x
-= (REAL_VALUE_TYPE
) high
* half_word
* half_word
;
1847 if (x
>= (REAL_VALUE_TYPE
) half_word
* half_word
/ 2)
1849 low
= x
- (REAL_VALUE_TYPE
) half_word
* half_word
/ 2;
1850 low
|= (HOST_WIDE_INT
) -1 << (HOST_BITS_PER_WIDE_INT
- 1);
1853 low
= (HOST_WIDE_INT
) x
;
1854 if (TREE_REAL_CST (arg1
) < 0)
1855 neg_double (low
, high
, &low
, &high
);
1856 t
= build_int_2 (low
, high
);
1860 HOST_WIDE_INT low
, high
;
1861 REAL_VALUE_TO_INT (&low
, &high
, x
);
1862 t
= build_int_2 (low
, high
);
1865 TREE_TYPE (t
) = type
;
1867 = TREE_OVERFLOW (arg1
) | force_fit_type (t
, overflow
);
1868 TREE_CONSTANT_OVERFLOW (t
)
1869 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1871 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1872 TREE_TYPE (t
) = type
;
1874 else if (TREE_CODE (type
) == REAL_TYPE
)
1876 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1877 if (TREE_CODE (arg1
) == INTEGER_CST
)
1878 return build_real_from_int_cst (type
, arg1
);
1879 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1880 if (TREE_CODE (arg1
) == REAL_CST
)
1882 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
)))
1885 TREE_TYPE (arg1
) = type
;
1888 else if (setjmp (float_error
))
1891 t
= copy_node (arg1
);
1894 set_float_handler (float_error
);
1896 t
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
1897 TREE_REAL_CST (arg1
)));
1898 set_float_handler (NULL_PTR
);
1902 = TREE_OVERFLOW (arg1
) | force_fit_type (t
, overflow
);
1903 TREE_CONSTANT_OVERFLOW (t
)
1904 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1908 TREE_CONSTANT (t
) = 1;
1912 /* Return an expr equal to X but certainly not valid as an lvalue. */
1920 /* These things are certainly not lvalues. */
1921 if (TREE_CODE (x
) == NON_LVALUE_EXPR
1922 || TREE_CODE (x
) == INTEGER_CST
1923 || TREE_CODE (x
) == REAL_CST
1924 || TREE_CODE (x
) == STRING_CST
1925 || TREE_CODE (x
) == ADDR_EXPR
)
1928 result
= build1 (NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
1929 TREE_CONSTANT (result
) = TREE_CONSTANT (x
);
1933 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1934 Zero means allow extended lvalues. */
1936 int pedantic_lvalues
;
1938 /* When pedantic, return an expr equal to X but certainly not valid as a
1939 pedantic lvalue. Otherwise, return X. */
1942 pedantic_non_lvalue (x
)
1945 if (pedantic_lvalues
)
1946 return non_lvalue (x
);
1951 /* Given a tree comparison code, return the code that is the logical inverse
1952 of the given code. It is not safe to do this for floating-point
1953 comparisons, except for NE_EXPR and EQ_EXPR. */
1955 static enum tree_code
1956 invert_tree_comparison (code
)
1957 enum tree_code code
;
1978 /* Similar, but return the comparison that results if the operands are
1979 swapped. This is safe for floating-point. */
1981 static enum tree_code
1982 swap_tree_comparison (code
)
1983 enum tree_code code
;
2003 /* Return nonzero if CODE is a tree code that represents a truth value. */
2006 truth_value_p (code
)
2007 enum tree_code code
;
2009 return (TREE_CODE_CLASS (code
) == '<'
2010 || code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
2011 || code
== TRUTH_OR_EXPR
|| code
== TRUTH_ORIF_EXPR
2012 || code
== TRUTH_XOR_EXPR
|| code
== TRUTH_NOT_EXPR
);
2015 /* Return nonzero if two operands are necessarily equal.
2016 If ONLY_CONST is non-zero, only return non-zero for constants.
2017 This function tests whether the operands are indistinguishable;
2018 it does not test whether they are equal using C's == operation.
2019 The distinction is important for IEEE floating point, because
2020 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2021 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2024 operand_equal_p (arg0
, arg1
, only_const
)
2028 /* If both types don't have the same signedness, then we can't consider
2029 them equal. We must check this before the STRIP_NOPS calls
2030 because they may change the signedness of the arguments. */
2031 if (TREE_UNSIGNED (TREE_TYPE (arg0
)) != TREE_UNSIGNED (TREE_TYPE (arg1
)))
2037 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2038 /* This is needed for conversions and for COMPONENT_REF.
2039 Might as well play it safe and always test this. */
2040 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2043 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2044 We don't care about side effects in that case because the SAVE_EXPR
2045 takes care of that for us. In all other cases, two expressions are
2046 equal if they have no side effects. If we have two identical
2047 expressions with side effects that should be treated the same due
2048 to the only side effects being identical SAVE_EXPR's, that will
2049 be detected in the recursive calls below. */
2050 if (arg0
== arg1
&& ! only_const
2051 && (TREE_CODE (arg0
) == SAVE_EXPR
2052 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2055 /* Next handle constant cases, those for which we can return 1 even
2056 if ONLY_CONST is set. */
2057 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2058 switch (TREE_CODE (arg0
))
2061 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2062 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2063 && TREE_INT_CST_LOW (arg0
) == TREE_INT_CST_LOW (arg1
)
2064 && TREE_INT_CST_HIGH (arg0
) == TREE_INT_CST_HIGH (arg1
));
2067 return (! TREE_CONSTANT_OVERFLOW (arg0
)
2068 && ! TREE_CONSTANT_OVERFLOW (arg1
)
2069 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2070 TREE_REAL_CST (arg1
)));
2073 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2075 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2079 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2080 && ! strncmp (TREE_STRING_POINTER (arg0
),
2081 TREE_STRING_POINTER (arg1
),
2082 TREE_STRING_LENGTH (arg0
)));
2085 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2094 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2097 /* Two conversions are equal only if signedness and modes match. */
2098 if ((TREE_CODE (arg0
) == NOP_EXPR
|| TREE_CODE (arg0
) == CONVERT_EXPR
)
2099 && (TREE_UNSIGNED (TREE_TYPE (arg0
))
2100 != TREE_UNSIGNED (TREE_TYPE (arg1
))))
2103 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2104 TREE_OPERAND (arg1
, 0), 0);
2108 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0)
2109 && operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1),
2113 /* For commutative ops, allow the other order. */
2114 return ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
2115 || TREE_CODE (arg0
) == MIN_EXPR
|| TREE_CODE (arg0
) == MAX_EXPR
2116 || TREE_CODE (arg0
) == BIT_IOR_EXPR
2117 || TREE_CODE (arg0
) == BIT_XOR_EXPR
2118 || TREE_CODE (arg0
) == BIT_AND_EXPR
2119 || TREE_CODE (arg0
) == NE_EXPR
|| TREE_CODE (arg0
) == EQ_EXPR
)
2120 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2121 TREE_OPERAND (arg1
, 1), 0)
2122 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2123 TREE_OPERAND (arg1
, 0), 0));
2126 switch (TREE_CODE (arg0
))
2129 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2130 TREE_OPERAND (arg1
, 0), 0);
2134 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2135 TREE_OPERAND (arg1
, 0), 0)
2136 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2137 TREE_OPERAND (arg1
, 1), 0));
2140 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2141 TREE_OPERAND (arg1
, 0), 0)
2142 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2143 TREE_OPERAND (arg1
, 1), 0)
2144 && operand_equal_p (TREE_OPERAND (arg0
, 2),
2145 TREE_OPERAND (arg1
, 2), 0));
2151 if (TREE_CODE (arg0
) == RTL_EXPR
)
2152 return rtx_equal_p (RTL_EXPR_RTL (arg0
), RTL_EXPR_RTL (arg1
));
2160 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2161 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2163 When in doubt, return 0. */
2166 operand_equal_for_comparison_p (arg0
, arg1
, other
)
2170 int unsignedp1
, unsignedpo
;
2171 tree primarg0
, primarg1
, primother
;
2172 unsigned correct_width
;
2174 if (operand_equal_p (arg0
, arg1
, 0))
2177 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2178 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2181 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2182 and see if the inner values are the same. This removes any
2183 signedness comparison, which doesn't matter here. */
2184 primarg0
= arg0
, primarg1
= arg1
;
2185 STRIP_NOPS (primarg0
); STRIP_NOPS (primarg1
);
2186 if (operand_equal_p (primarg0
, primarg1
, 0))
2189 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2190 actual comparison operand, ARG0.
2192 First throw away any conversions to wider types
2193 already present in the operands. */
2195 primarg1
= get_narrower (arg1
, &unsignedp1
);
2196 primother
= get_narrower (other
, &unsignedpo
);
2198 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2199 if (unsignedp1
== unsignedpo
2200 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2201 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2203 tree type
= TREE_TYPE (arg0
);
2205 /* Make sure shorter operand is extended the right way
2206 to match the longer operand. */
2207 primarg1
= convert (signed_or_unsigned_type (unsignedp1
,
2208 TREE_TYPE (primarg1
)),
2211 if (operand_equal_p (arg0
, convert (type
, primarg1
), 0))
2218 /* See if ARG is an expression that is either a comparison or is performing
2219 arithmetic on comparisons. The comparisons must only be comparing
2220 two different values, which will be stored in *CVAL1 and *CVAL2; if
2221 they are non-zero it means that some operands have already been found.
2222 No variables may be used anywhere else in the expression except in the
2223 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2224 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2226 If this is true, return 1. Otherwise, return zero. */
2229 twoval_comparison_p (arg
, cval1
, cval2
, save_p
)
2231 tree
*cval1
, *cval2
;
2234 enum tree_code code
= TREE_CODE (arg
);
2235 char class = TREE_CODE_CLASS (code
);
2237 /* We can handle some of the 'e' cases here. */
2238 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
2240 else if (class == 'e'
2241 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2242 || code
== COMPOUND_EXPR
))
2245 /* ??? Disable this since the SAVE_EXPR might already be in use outside
2246 the expression. There may be no way to make this work, but it needs
2247 to be looked at again for 2.6. */
2249 else if (class == 'e' && code
== SAVE_EXPR
&& SAVE_EXPR_RTL (arg
) == 0)
2251 /* If we've already found a CVAL1 or CVAL2, this expression is
2252 two complex to handle. */
2253 if (*cval1
|| *cval2
)
2264 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2267 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2268 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2269 cval1
, cval2
, save_p
));
2275 if (code
== COND_EXPR
)
2276 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2277 cval1
, cval2
, save_p
)
2278 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2279 cval1
, cval2
, save_p
)
2280 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2281 cval1
, cval2
, save_p
));
2285 /* First see if we can handle the first operand, then the second. For
2286 the second operand, we know *CVAL1 can't be zero. It must be that
2287 one side of the comparison is each of the values; test for the
2288 case where this isn't true by failing if the two operands
2291 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2292 TREE_OPERAND (arg
, 1), 0))
2296 *cval1
= TREE_OPERAND (arg
, 0);
2297 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2299 else if (*cval2
== 0)
2300 *cval2
= TREE_OPERAND (arg
, 0);
2301 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2306 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2308 else if (*cval2
== 0)
2309 *cval2
= TREE_OPERAND (arg
, 1);
2310 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2322 /* ARG is a tree that is known to contain just arithmetic operations and
2323 comparisons. Evaluate the operations in the tree substituting NEW0 for
2324 any occurrence of OLD0 as an operand of a comparison and likewise for
2328 eval_subst (arg
, old0
, new0
, old1
, new1
)
2330 tree old0
, new0
, old1
, new1
;
2332 tree type
= TREE_TYPE (arg
);
2333 enum tree_code code
= TREE_CODE (arg
);
2334 char class = TREE_CODE_CLASS (code
);
2336 /* We can handle some of the 'e' cases here. */
2337 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
2339 else if (class == 'e'
2340 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2346 return fold (build1 (code
, type
,
2347 eval_subst (TREE_OPERAND (arg
, 0),
2348 old0
, new0
, old1
, new1
)));
2351 return fold (build (code
, type
,
2352 eval_subst (TREE_OPERAND (arg
, 0),
2353 old0
, new0
, old1
, new1
),
2354 eval_subst (TREE_OPERAND (arg
, 1),
2355 old0
, new0
, old1
, new1
)));
2361 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
2364 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
2367 return fold (build (code
, type
,
2368 eval_subst (TREE_OPERAND (arg
, 0),
2369 old0
, new0
, old1
, new1
),
2370 eval_subst (TREE_OPERAND (arg
, 1),
2371 old0
, new0
, old1
, new1
),
2372 eval_subst (TREE_OPERAND (arg
, 2),
2373 old0
, new0
, old1
, new1
)));
2377 /* fall through (???) */
2381 tree arg0
= TREE_OPERAND (arg
, 0);
2382 tree arg1
= TREE_OPERAND (arg
, 1);
2384 /* We need to check both for exact equality and tree equality. The
2385 former will be true if the operand has a side-effect. In that
2386 case, we know the operand occurred exactly once. */
2388 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2390 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2393 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2395 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2398 return fold (build (code
, type
, arg0
, arg1
));
2406 /* Return a tree for the case when the result of an expression is RESULT
2407 converted to TYPE and OMITTED was previously an operand of the expression
2408 but is now not needed (e.g., we folded OMITTED * 0).
2410 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2411 the conversion of RESULT to TYPE. */
2414 omit_one_operand (type
, result
, omitted
)
2415 tree type
, result
, omitted
;
2417 tree t
= convert (type
, result
);
2419 if (TREE_SIDE_EFFECTS (omitted
))
2420 return build (COMPOUND_EXPR
, type
, omitted
, t
);
2422 return non_lvalue (t
);
2425 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2428 pedantic_omit_one_operand (type
, result
, omitted
)
2429 tree type
, result
, omitted
;
2431 tree t
= convert (type
, result
);
2433 if (TREE_SIDE_EFFECTS (omitted
))
2434 return build (COMPOUND_EXPR
, type
, omitted
, t
);
2436 return pedantic_non_lvalue (t
);
2441 /* Return a simplified tree node for the truth-negation of ARG. This
2442 never alters ARG itself. We assume that ARG is an operation that
2443 returns a truth value (0 or 1). */
2446 invert_truthvalue (arg
)
2449 tree type
= TREE_TYPE (arg
);
2450 enum tree_code code
= TREE_CODE (arg
);
2452 if (code
== ERROR_MARK
)
2455 /* If this is a comparison, we can simply invert it, except for
2456 floating-point non-equality comparisons, in which case we just
2457 enclose a TRUTH_NOT_EXPR around what we have. */
2459 if (TREE_CODE_CLASS (code
) == '<')
2461 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
2462 && !flag_fast_math
&& code
!= NE_EXPR
&& code
!= EQ_EXPR
)
2463 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2465 return build (invert_tree_comparison (code
), type
,
2466 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
2472 return convert (type
, build_int_2 (TREE_INT_CST_LOW (arg
) == 0
2473 && TREE_INT_CST_HIGH (arg
) == 0, 0));
2475 case TRUTH_AND_EXPR
:
2476 return build (TRUTH_OR_EXPR
, type
,
2477 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2478 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2481 return build (TRUTH_AND_EXPR
, type
,
2482 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2483 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2485 case TRUTH_XOR_EXPR
:
2486 /* Here we can invert either operand. We invert the first operand
2487 unless the second operand is a TRUTH_NOT_EXPR in which case our
2488 result is the XOR of the first operand with the inside of the
2489 negation of the second operand. */
2491 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
2492 return build (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
2493 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
2495 return build (TRUTH_XOR_EXPR
, type
,
2496 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2497 TREE_OPERAND (arg
, 1));
2499 case TRUTH_ANDIF_EXPR
:
2500 return build (TRUTH_ORIF_EXPR
, type
,
2501 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2502 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2504 case TRUTH_ORIF_EXPR
:
2505 return build (TRUTH_ANDIF_EXPR
, type
,
2506 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2507 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2509 case TRUTH_NOT_EXPR
:
2510 return TREE_OPERAND (arg
, 0);
2513 return build (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
2514 invert_truthvalue (TREE_OPERAND (arg
, 1)),
2515 invert_truthvalue (TREE_OPERAND (arg
, 2)));
2518 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
2519 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2521 case NON_LVALUE_EXPR
:
2522 return invert_truthvalue (TREE_OPERAND (arg
, 0));
2527 return build1 (TREE_CODE (arg
), type
,
2528 invert_truthvalue (TREE_OPERAND (arg
, 0)));
2531 if (!integer_onep (TREE_OPERAND (arg
, 1)))
2533 return build (EQ_EXPR
, type
, arg
, convert (type
, integer_zero_node
));
2536 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2538 case CLEANUP_POINT_EXPR
:
2539 return build1 (CLEANUP_POINT_EXPR
, type
,
2540 invert_truthvalue (TREE_OPERAND (arg
, 0)));
2545 if (TREE_CODE (TREE_TYPE (arg
)) != BOOLEAN_TYPE
)
2547 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2550 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2551 operands are another bit-wise operation with a common input. If so,
2552 distribute the bit operations to save an operation and possibly two if
2553 constants are involved. For example, convert
2554 (A | B) & (A | C) into A | (B & C)
2555 Further simplification will occur if B and C are constants.
2557 If this optimization cannot be done, 0 will be returned. */
2560 distribute_bit_expr (code
, type
, arg0
, arg1
)
2561 enum tree_code code
;
2568 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2569 || TREE_CODE (arg0
) == code
2570 || (TREE_CODE (arg0
) != BIT_AND_EXPR
2571 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
2574 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
2576 common
= TREE_OPERAND (arg0
, 0);
2577 left
= TREE_OPERAND (arg0
, 1);
2578 right
= TREE_OPERAND (arg1
, 1);
2580 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
2582 common
= TREE_OPERAND (arg0
, 0);
2583 left
= TREE_OPERAND (arg0
, 1);
2584 right
= TREE_OPERAND (arg1
, 0);
2586 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
2588 common
= TREE_OPERAND (arg0
, 1);
2589 left
= TREE_OPERAND (arg0
, 0);
2590 right
= TREE_OPERAND (arg1
, 1);
2592 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
2594 common
= TREE_OPERAND (arg0
, 1);
2595 left
= TREE_OPERAND (arg0
, 0);
2596 right
= TREE_OPERAND (arg1
, 0);
2601 return fold (build (TREE_CODE (arg0
), type
, common
,
2602 fold (build (code
, type
, left
, right
))));
2605 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2606 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2609 make_bit_field_ref (inner
, type
, bitsize
, bitpos
, unsignedp
)
2612 int bitsize
, bitpos
;
2615 tree result
= build (BIT_FIELD_REF
, type
, inner
,
2616 size_int (bitsize
), bitsize_int (bitpos
, 0L));
2618 TREE_UNSIGNED (result
) = unsignedp
;
2623 /* Optimize a bit-field compare.
2625 There are two cases: First is a compare against a constant and the
2626 second is a comparison of two items where the fields are at the same
2627 bit position relative to the start of a chunk (byte, halfword, word)
2628 large enough to contain it. In these cases we can avoid the shift
2629 implicit in bitfield extractions.
2631 For constants, we emit a compare of the shifted constant with the
2632 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2633 compared. For two fields at the same position, we do the ANDs with the
2634 similar mask and compare the result of the ANDs.
2636 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2637 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2638 are the left and right operands of the comparison, respectively.
2640 If the optimization described above can be done, we return the resulting
2641 tree. Otherwise we return zero. */
2644 optimize_bit_field_compare (code
, compare_type
, lhs
, rhs
)
2645 enum tree_code code
;
2649 int lbitpos
, lbitsize
, rbitpos
, rbitsize
;
2650 int lnbitpos
, lnbitsize
, rnbitpos
= 0, rnbitsize
= 0;
2651 tree type
= TREE_TYPE (lhs
);
2652 tree signed_type
, unsigned_type
;
2653 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
2654 enum machine_mode lmode
, rmode
, lnmode
, rnmode
= VOIDmode
;
2655 int lunsignedp
, runsignedp
;
2656 int lvolatilep
= 0, rvolatilep
= 0;
2658 tree linner
, rinner
= NULL_TREE
;
2662 /* Get all the information about the extractions being done. If the bit size
2663 if the same as the size of the underlying object, we aren't doing an
2664 extraction at all and so can do nothing. */
2665 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
2666 &lunsignedp
, &lvolatilep
, &alignment
);
2667 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
2673 /* If this is not a constant, we can only do something if bit positions,
2674 sizes, and signedness are the same. */
2675 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
2676 &runsignedp
, &rvolatilep
, &alignment
);
2678 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
2679 || lunsignedp
!= runsignedp
|| offset
!= 0)
2683 /* See if we can find a mode to refer to this field. We should be able to,
2684 but fail if we can't. */
2685 lnmode
= get_best_mode (lbitsize
, lbitpos
,
2686 TYPE_ALIGN (TREE_TYPE (linner
)), word_mode
,
2688 if (lnmode
== VOIDmode
)
2691 /* Set signed and unsigned types of the precision of this mode for the
2693 signed_type
= type_for_mode (lnmode
, 0);
2694 unsigned_type
= type_for_mode (lnmode
, 1);
2698 rnmode
= get_best_mode (rbitsize
, rbitpos
,
2699 TYPE_ALIGN (TREE_TYPE (rinner
)), word_mode
,
2701 if (rnmode
== VOIDmode
)
2705 /* Compute the bit position and size for the new reference and our offset
2706 within it. If the new reference is the same size as the original, we
2707 won't optimize anything, so return zero. */
2708 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
2709 lnbitpos
= lbitpos
& ~ (lnbitsize
- 1);
2710 lbitpos
-= lnbitpos
;
2711 if (lnbitsize
== lbitsize
)
2716 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
2717 rnbitpos
= rbitpos
& ~ (rnbitsize
- 1);
2718 rbitpos
-= rnbitpos
;
2719 if (rnbitsize
== rbitsize
)
2723 if (BYTES_BIG_ENDIAN
)
2724 lbitpos
= lnbitsize
- lbitsize
- lbitpos
;
2726 /* Make the mask to be used against the extracted field. */
2727 mask
= build_int_2 (~0, ~0);
2728 TREE_TYPE (mask
) = unsigned_type
;
2729 force_fit_type (mask
, 0);
2730 mask
= convert (unsigned_type
, mask
);
2731 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (lnbitsize
- lbitsize
), 0);
2732 mask
= const_binop (RSHIFT_EXPR
, mask
,
2733 size_int (lnbitsize
- lbitsize
- lbitpos
), 0);
2736 /* If not comparing with constant, just rework the comparison
2738 return build (code
, compare_type
,
2739 build (BIT_AND_EXPR
, unsigned_type
,
2740 make_bit_field_ref (linner
, unsigned_type
,
2741 lnbitsize
, lnbitpos
, 1),
2743 build (BIT_AND_EXPR
, unsigned_type
,
2744 make_bit_field_ref (rinner
, unsigned_type
,
2745 rnbitsize
, rnbitpos
, 1),
2748 /* Otherwise, we are handling the constant case. See if the constant is too
2749 big for the field. Warn and return a tree of for 0 (false) if so. We do
2750 this not only for its own sake, but to avoid having to test for this
2751 error case below. If we didn't, we might generate wrong code.
2753 For unsigned fields, the constant shifted right by the field length should
2754 be all zero. For signed fields, the high-order bits should agree with
2759 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
2760 convert (unsigned_type
, rhs
),
2761 size_int (lbitsize
), 0)))
2763 warning ("comparison is always %s due to width of bitfield",
2764 code
== NE_EXPR
? "one" : "zero");
2765 return convert (compare_type
,
2767 ? integer_one_node
: integer_zero_node
));
2772 tree tem
= const_binop (RSHIFT_EXPR
, convert (signed_type
, rhs
),
2773 size_int (lbitsize
- 1), 0);
2774 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
2776 warning ("comparison is always %s due to width of bitfield",
2777 code
== NE_EXPR
? "one" : "zero");
2778 return convert (compare_type
,
2780 ? integer_one_node
: integer_zero_node
));
2784 /* Single-bit compares should always be against zero. */
2785 if (lbitsize
== 1 && ! integer_zerop (rhs
))
2787 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
2788 rhs
= convert (type
, integer_zero_node
);
2791 /* Make a new bitfield reference, shift the constant over the
2792 appropriate number of bits and mask it with the computed mask
2793 (in case this was a signed field). If we changed it, make a new one. */
2794 lhs
= make_bit_field_ref (linner
, unsigned_type
, lnbitsize
, lnbitpos
, 1);
2797 TREE_SIDE_EFFECTS (lhs
) = 1;
2798 TREE_THIS_VOLATILE (lhs
) = 1;
2801 rhs
= fold (const_binop (BIT_AND_EXPR
,
2802 const_binop (LSHIFT_EXPR
,
2803 convert (unsigned_type
, rhs
),
2804 size_int (lbitpos
), 0),
2807 return build (code
, compare_type
,
2808 build (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
2812 /* Subroutine for fold_truthop: decode a field reference.
2814 If EXP is a comparison reference, we return the innermost reference.
2816 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2817 set to the starting bit number.
2819 If the innermost field can be completely contained in a mode-sized
2820 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2822 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2823 otherwise it is not changed.
2825 *PUNSIGNEDP is set to the signedness of the field.
2827 *PMASK is set to the mask used. This is either contained in a
2828 BIT_AND_EXPR or derived from the width of the field.
2830 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2832 Return 0 if this is not a component reference or is one that we can't
2833 do anything with. */
2836 decode_field_reference (exp
, pbitsize
, pbitpos
, pmode
, punsignedp
,
2837 pvolatilep
, pmask
, pand_mask
)
2839 int *pbitsize
, *pbitpos
;
2840 enum machine_mode
*pmode
;
2841 int *punsignedp
, *pvolatilep
;
2846 tree mask
, inner
, offset
;
2851 /* All the optimizations using this function assume integer fields.
2852 There are problems with FP fields since the type_for_size call
2853 below can fail for, e.g., XFmode. */
2854 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
2859 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
2861 and_mask
= TREE_OPERAND (exp
, 1);
2862 exp
= TREE_OPERAND (exp
, 0);
2863 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
2864 if (TREE_CODE (and_mask
) != INTEGER_CST
)
2869 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
2870 punsignedp
, pvolatilep
, &alignment
);
2871 if ((inner
== exp
&& and_mask
== 0)
2872 || *pbitsize
< 0 || offset
!= 0)
2875 /* Compute the mask to access the bitfield. */
2876 unsigned_type
= type_for_size (*pbitsize
, 1);
2877 precision
= TYPE_PRECISION (unsigned_type
);
2879 mask
= build_int_2 (~0, ~0);
2880 TREE_TYPE (mask
) = unsigned_type
;
2881 force_fit_type (mask
, 0);
2882 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2883 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2885 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2887 mask
= fold (build (BIT_AND_EXPR
, unsigned_type
,
2888 convert (unsigned_type
, and_mask
), mask
));
2891 *pand_mask
= and_mask
;
2895 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2899 all_ones_mask_p (mask
, size
)
2903 tree type
= TREE_TYPE (mask
);
2904 int precision
= TYPE_PRECISION (type
);
2907 tmask
= build_int_2 (~0, ~0);
2908 TREE_TYPE (tmask
) = signed_type (type
);
2909 force_fit_type (tmask
, 0);
2911 tree_int_cst_equal (mask
,
2912 const_binop (RSHIFT_EXPR
,
2913 const_binop (LSHIFT_EXPR
, tmask
,
2914 size_int (precision
- size
),
2916 size_int (precision
- size
), 0));
2919 /* Subroutine for fold_truthop: determine if an operand is simple enough
2920 to be evaluated unconditionally. */
2923 simple_operand_p (exp
)
2926 /* Strip any conversions that don't change the machine mode. */
2927 while ((TREE_CODE (exp
) == NOP_EXPR
2928 || TREE_CODE (exp
) == CONVERT_EXPR
)
2929 && (TYPE_MODE (TREE_TYPE (exp
))
2930 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
2931 exp
= TREE_OPERAND (exp
, 0);
2933 return (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c'
2934 || (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd'
2935 && ! TREE_ADDRESSABLE (exp
)
2936 && ! TREE_THIS_VOLATILE (exp
)
2937 && ! DECL_NONLOCAL (exp
)
2938 /* Don't regard global variables as simple. They may be
2939 allocated in ways unknown to the compiler (shared memory,
2940 #pragma weak, etc). */
2941 && ! TREE_PUBLIC (exp
)
2942 && ! DECL_EXTERNAL (exp
)
2943 /* Loading a static variable is unduly expensive, but global
2944 registers aren't expensive. */
2945 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
2948 /* The following functions are subroutines to fold_range_test and allow it to
2949 try to change a logical combination of comparisons into a range test.
2952 X == 2 && X == 3 && X == 4 && X == 5
2956 (unsigned) (X - 2) <= 3
2958 We describe each set of comparisons as being either inside or outside
2959 a range, using a variable named like IN_P, and then describe the
2960 range with a lower and upper bound. If one of the bounds is omitted,
2961 it represents either the highest or lowest value of the type.
2963 In the comments below, we represent a range by two numbers in brackets
2964 preceded by a "+" to designate being inside that range, or a "-" to
2965 designate being outside that range, so the condition can be inverted by
2966 flipping the prefix. An omitted bound is represented by a "-". For
2967 example, "- [-, 10]" means being outside the range starting at the lowest
2968 possible value and ending at 10, in other words, being greater than 10.
2969 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
2972 We set up things so that the missing bounds are handled in a consistent
2973 manner so neither a missing bound nor "true" and "false" need to be
2974 handled using a special case. */
2976 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2977 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2978 and UPPER1_P are nonzero if the respective argument is an upper bound
2979 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2980 must be specified for a comparison. ARG1 will be converted to ARG0's
2981 type if both are specified. */
2984 range_binop (code
, type
, arg0
, upper0_p
, arg1
, upper1_p
)
2985 enum tree_code code
;
2988 int upper0_p
, upper1_p
;
2994 /* If neither arg represents infinity, do the normal operation.
2995 Else, if not a comparison, return infinity. Else handle the special
2996 comparison rules. Note that most of the cases below won't occur, but
2997 are handled for consistency. */
2999 if (arg0
!= 0 && arg1
!= 0)
3001 tem
= fold (build (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3002 arg0
, convert (TREE_TYPE (arg0
), arg1
)));
3004 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3007 if (TREE_CODE_CLASS (code
) != '<')
3010 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3011 for neither. Then compute our result treating them as never equal
3012 and comparing bounds to non-bounds as above. */
3013 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3014 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3017 case EQ_EXPR
: case NE_EXPR
:
3018 result
= (code
== NE_EXPR
);
3020 case LT_EXPR
: case LE_EXPR
:
3021 result
= sgn0
< sgn1
;
3023 case GT_EXPR
: case GE_EXPR
:
3024 result
= sgn0
> sgn1
;
3030 return convert (type
, result
? integer_one_node
: integer_zero_node
);
3033 /* Given EXP, a logical expression, set the range it is testing into
3034 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3035 actually being tested. *PLOW and *PHIGH will have be made the same type
3036 as the returned expression. If EXP is not a comparison, we will most
3037 likely not be returning a useful value and range. */
3040 make_range (exp
, pin_p
, plow
, phigh
)
3045 enum tree_code code
;
3046 tree arg0
, arg1
, type
= NULL_TREE
;
3047 tree orig_type
= NULL_TREE
;
3049 tree low
, high
, n_low
, n_high
;
3051 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3052 and see if we can refine the range. Some of the cases below may not
3053 happen, but it doesn't seem worth worrying about this. We "continue"
3054 the outer loop when we've changed something; otherwise we "break"
3055 the switch, which will "break" the while. */
3057 in_p
= 0, low
= high
= convert (TREE_TYPE (exp
), integer_zero_node
);
3061 code
= TREE_CODE (exp
);
3063 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
3065 arg0
= TREE_OPERAND (exp
, 0);
3066 if (TREE_CODE_CLASS (code
) == '<'
3067 || TREE_CODE_CLASS (code
) == '1'
3068 || TREE_CODE_CLASS (code
) == '2')
3069 type
= TREE_TYPE (arg0
);
3070 if (TREE_CODE_CLASS (code
) == '2'
3071 || TREE_CODE_CLASS (code
) == '<'
3072 || (TREE_CODE_CLASS (code
) == 'e'
3073 && tree_code_length
[(int) code
] > 1))
3074 arg1
= TREE_OPERAND (exp
, 1);
3079 case TRUTH_NOT_EXPR
:
3080 in_p
= ! in_p
, exp
= arg0
;
3083 case EQ_EXPR
: case NE_EXPR
:
3084 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3085 /* We can only do something if the range is testing for zero
3086 and if the second operand is an integer constant. Note that
3087 saying something is "in" the range we make is done by
3088 complementing IN_P since it will set in the initial case of
3089 being not equal to zero; "out" is leaving it alone. */
3090 if (low
== 0 || high
== 0
3091 || ! integer_zerop (low
) || ! integer_zerop (high
)
3092 || TREE_CODE (arg1
) != INTEGER_CST
)
3097 case NE_EXPR
: /* - [c, c] */
3100 case EQ_EXPR
: /* + [c, c] */
3101 in_p
= ! in_p
, low
= high
= arg1
;
3103 case GT_EXPR
: /* - [-, c] */
3104 low
= 0, high
= arg1
;
3106 case GE_EXPR
: /* + [c, -] */
3107 in_p
= ! in_p
, low
= arg1
, high
= 0;
3109 case LT_EXPR
: /* - [c, -] */
3110 low
= arg1
, high
= 0;
3112 case LE_EXPR
: /* + [-, c] */
3113 in_p
= ! in_p
, low
= 0, high
= arg1
;
3121 /* If this is an unsigned comparison, we also know that EXP is
3122 greater than or equal to zero. We base the range tests we make
3123 on that fact, so we record it here so we can parse existing
3125 if (TREE_UNSIGNED (type
) && (low
== 0 || high
== 0))
3127 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, in_p
, low
, high
,
3128 1, convert (type
, integer_zero_node
),
3132 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3134 /* If the high bound is missing, reverse the range so it
3135 goes from zero to the low bound minus 1. */
3139 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3140 integer_one_node
, 0);
3141 low
= convert (type
, integer_zero_node
);
3147 /* (-x) IN [a,b] -> x in [-b, -a] */
3148 n_low
= range_binop (MINUS_EXPR
, type
,
3149 convert (type
, integer_zero_node
), 0, high
, 1);
3150 n_high
= range_binop (MINUS_EXPR
, type
,
3151 convert (type
, integer_zero_node
), 0, low
, 0);
3152 low
= n_low
, high
= n_high
;
3158 exp
= build (MINUS_EXPR
, type
, build1 (NEGATE_EXPR
, type
, arg0
),
3159 convert (type
, integer_one_node
));
3162 case PLUS_EXPR
: case MINUS_EXPR
:
3163 if (TREE_CODE (arg1
) != INTEGER_CST
)
3166 /* If EXP is signed, any overflow in the computation is undefined,
3167 so we don't worry about it so long as our computations on
3168 the bounds don't overflow. For unsigned, overflow is defined
3169 and this is exactly the right thing. */
3170 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3171 type
, low
, 0, arg1
, 0);
3172 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3173 type
, high
, 1, arg1
, 0);
3174 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3175 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3178 /* Check for an unsigned range which has wrapped around the maximum
3179 value thus making n_high < n_low, and normalize it. */
3180 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3182 low
= range_binop (PLUS_EXPR
, type
, n_high
, 0,
3183 integer_one_node
, 0);
3184 high
= range_binop (MINUS_EXPR
, type
, n_low
, 0,
3185 integer_one_node
, 0);
3189 low
= n_low
, high
= n_high
;
3194 case NOP_EXPR
: case NON_LVALUE_EXPR
: case CONVERT_EXPR
:
3195 if (orig_type
== NULL_TREE
)
3197 if (TYPE_PRECISION (type
) > TYPE_PRECISION (orig_type
))
3200 if (! INTEGRAL_TYPE_P (type
)
3201 || (low
!= 0 && ! int_fits_type_p (low
, type
))
3202 || (high
!= 0 && ! int_fits_type_p (high
, type
)))
3205 n_low
= low
, n_high
= high
;
3208 n_low
= convert (type
, n_low
);
3211 n_high
= convert (type
, n_high
);
3213 /* If we're converting from an unsigned to a signed type,
3214 we will be doing the comparison as unsigned. The tests above
3215 have already verified that LOW and HIGH are both positive.
3217 So we have to make sure that the original unsigned value will
3218 be interpreted as positive. */
3219 if (TREE_UNSIGNED (type
) && ! TREE_UNSIGNED (TREE_TYPE (exp
)))
3221 tree equiv_type
= type_for_mode (TYPE_MODE (type
), 1);
3224 /* A range without an upper bound is, naturally, unbounded.
3225 Since convert would have cropped a very large value, use
3226 the max value for the destination type. */
3228 high_positive
= TYPE_MAX_VALUE (equiv_type
);
3231 high_positive
= TYPE_MAX_VALUE (type
);
3235 high_positive
= fold (build (RSHIFT_EXPR
, type
,
3236 convert (type
, high_positive
),
3237 convert (type
, integer_one_node
)));
3239 /* If the low bound is specified, "and" the range with the
3240 range for which the original unsigned value will be
3244 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3246 1, convert (type
, integer_zero_node
),
3250 in_p
= (n_in_p
== in_p
);
3254 /* Otherwise, "or" the range with the range of the input
3255 that will be interpreted as negative. */
3256 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3258 1, convert (type
, integer_zero_node
),
3262 in_p
= (in_p
!= n_in_p
);
3267 low
= n_low
, high
= n_high
;
3277 /* If EXP is a constant, we can evaluate whether this is true or false. */
3278 if (TREE_CODE (exp
) == INTEGER_CST
)
3280 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
3282 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
3288 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
3292 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3293 type, TYPE, return an expression to test if EXP is in (or out of, depending
3294 on IN_P) the range. */
3297 build_range_check (type
, exp
, in_p
, low
, high
)
3303 tree etype
= TREE_TYPE (exp
);
3307 && (0 != (value
= build_range_check (type
, exp
, 1, low
, high
))))
3308 return invert_truthvalue (value
);
3310 else if (low
== 0 && high
== 0)
3311 return convert (type
, integer_one_node
);
3314 return fold (build (LE_EXPR
, type
, exp
, high
));
3317 return fold (build (GE_EXPR
, type
, exp
, low
));
3319 else if (operand_equal_p (low
, high
, 0))
3320 return fold (build (EQ_EXPR
, type
, exp
, low
));
3322 else if (TREE_UNSIGNED (etype
) && integer_zerop (low
))
3323 return build_range_check (type
, exp
, 1, 0, high
);
3325 else if (integer_zerop (low
))
3327 utype
= unsigned_type (etype
);
3328 return build_range_check (type
, convert (utype
, exp
), 1, 0,
3329 convert (utype
, high
));
3332 else if (0 != (value
= const_binop (MINUS_EXPR
, high
, low
, 0))
3333 && ! TREE_OVERFLOW (value
))
3334 return build_range_check (type
,
3335 fold (build (MINUS_EXPR
, etype
, exp
, low
)),
3336 1, convert (etype
, integer_zero_node
), value
);
3341 /* Given two ranges, see if we can merge them into one. Return 1 if we
3342 can, 0 if we can't. Set the output range into the specified parameters. */
3345 merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
, in1_p
, low1
, high1
)
3349 tree low0
, high0
, low1
, high1
;
3357 int lowequal
= ((low0
== 0 && low1
== 0)
3358 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
3359 low0
, 0, low1
, 0)));
3360 int highequal
= ((high0
== 0 && high1
== 0)
3361 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
3362 high0
, 1, high1
, 1)));
3364 /* Make range 0 be the range that starts first, or ends last if they
3365 start at the same value. Swap them if it isn't. */
3366 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
3369 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
3370 high1
, 1, high0
, 1))))
3372 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
3373 tem
= low0
, low0
= low1
, low1
= tem
;
3374 tem
= high0
, high0
= high1
, high1
= tem
;
3377 /* Now flag two cases, whether the ranges are disjoint or whether the
3378 second range is totally subsumed in the first. Note that the tests
3379 below are simplified by the ones above. */
3380 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
3381 high0
, 1, low1
, 0));
3382 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
3383 high1
, 1, high0
, 1));
3385 /* We now have four cases, depending on whether we are including or
3386 excluding the two ranges. */
3389 /* If they don't overlap, the result is false. If the second range
3390 is a subset it is the result. Otherwise, the range is from the start
3391 of the second to the end of the first. */
3393 in_p
= 0, low
= high
= 0;
3395 in_p
= 1, low
= low1
, high
= high1
;
3397 in_p
= 1, low
= low1
, high
= high0
;
3400 else if (in0_p
&& ! in1_p
)
3402 /* If they don't overlap, the result is the first range. If they are
3403 equal, the result is false. If the second range is a subset of the
3404 first, and the ranges begin at the same place, we go from just after
3405 the end of the first range to the end of the second. If the second
3406 range is not a subset of the first, or if it is a subset and both
3407 ranges end at the same place, the range starts at the start of the
3408 first range and ends just before the second range.
3409 Otherwise, we can't describe this as a single range. */
3411 in_p
= 1, low
= low0
, high
= high0
;
3412 else if (lowequal
&& highequal
)
3413 in_p
= 0, low
= high
= 0;
3414 else if (subset
&& lowequal
)
3416 in_p
= 1, high
= high0
;
3417 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high1
, 0,
3418 integer_one_node
, 0);
3420 else if (! subset
|| highequal
)
3422 in_p
= 1, low
= low0
;
3423 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low1
, 0,
3424 integer_one_node
, 0);
3430 else if (! in0_p
&& in1_p
)
3432 /* If they don't overlap, the result is the second range. If the second
3433 is a subset of the first, the result is false. Otherwise,
3434 the range starts just after the first range and ends at the
3435 end of the second. */
3437 in_p
= 1, low
= low1
, high
= high1
;
3439 in_p
= 0, low
= high
= 0;
3442 in_p
= 1, high
= high1
;
3443 low
= range_binop (PLUS_EXPR
, NULL_TREE
, high0
, 1,
3444 integer_one_node
, 0);
3450 /* The case where we are excluding both ranges. Here the complex case
3451 is if they don't overlap. In that case, the only time we have a
3452 range is if they are adjacent. If the second is a subset of the
3453 first, the result is the first. Otherwise, the range to exclude
3454 starts at the beginning of the first range and ends at the end of the
3458 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
3459 range_binop (PLUS_EXPR
, NULL_TREE
,
3461 integer_one_node
, 1),
3463 in_p
= 0, low
= low0
, high
= high1
;
3468 in_p
= 0, low
= low0
, high
= high0
;
3470 in_p
= 0, low
= low0
, high
= high1
;
3473 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
3477 /* EXP is some logical combination of boolean tests. See if we can
3478 merge it into some range test. Return the new tree if so. */
3481 fold_range_test (exp
)
3484 int or_op
= (TREE_CODE (exp
) == TRUTH_ORIF_EXPR
3485 || TREE_CODE (exp
) == TRUTH_OR_EXPR
);
3486 int in0_p
, in1_p
, in_p
;
3487 tree low0
, low1
, low
, high0
, high1
, high
;
3488 tree lhs
= make_range (TREE_OPERAND (exp
, 0), &in0_p
, &low0
, &high0
);
3489 tree rhs
= make_range (TREE_OPERAND (exp
, 1), &in1_p
, &low1
, &high1
);
3492 /* If this is an OR operation, invert both sides; we will invert
3493 again at the end. */
3495 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
3497 /* If both expressions are the same, if we can merge the ranges, and we
3498 can build the range test, return it or it inverted. If one of the
3499 ranges is always true or always false, consider it to be the same
3500 expression as the other. */
3501 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
3502 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
3504 && 0 != (tem
= (build_range_check (TREE_TYPE (exp
),
3506 : rhs
!= 0 ? rhs
: integer_zero_node
,
3508 return or_op
? invert_truthvalue (tem
) : tem
;
3510 /* On machines where the branch cost is expensive, if this is a
3511 short-circuited branch and the underlying object on both sides
3512 is the same, make a non-short-circuit operation. */
3513 else if (BRANCH_COST
>= 2
3514 && (TREE_CODE (exp
) == TRUTH_ANDIF_EXPR
3515 || TREE_CODE (exp
) == TRUTH_ORIF_EXPR
)
3516 && operand_equal_p (lhs
, rhs
, 0))
3518 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3519 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3520 which cases we can't do this. */
3521 if (simple_operand_p (lhs
))
3522 return build (TREE_CODE (exp
) == TRUTH_ANDIF_EXPR
3523 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
3524 TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
3525 TREE_OPERAND (exp
, 1));
3527 else if (current_function_decl
!= 0
3528 && ! contains_placeholder_p (lhs
))
3530 tree common
= save_expr (lhs
);
3532 if (0 != (lhs
= build_range_check (TREE_TYPE (exp
), common
,
3533 or_op
? ! in0_p
: in0_p
,
3535 && (0 != (rhs
= build_range_check (TREE_TYPE (exp
), common
,
3536 or_op
? ! in1_p
: in1_p
,
3538 return build (TREE_CODE (exp
) == TRUTH_ANDIF_EXPR
3539 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
3540 TREE_TYPE (exp
), lhs
, rhs
);
3548 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3549 bit value. Arrange things so the extra bits will be set to zero if and
3550 only if C is signed-extended to its full width. If MASK is nonzero,
3551 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3554 unextend (c
, p
, unsignedp
, mask
)
3560 tree type
= TREE_TYPE (c
);
3561 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
3564 if (p
== modesize
|| unsignedp
)
3567 /* We work by getting just the sign bit into the low-order bit, then
3568 into the high-order bit, then sign-extend. We then XOR that value
3570 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
3571 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
3573 /* We must use a signed type in order to get an arithmetic right shift.
3574 However, we must also avoid introducing accidental overflows, so that
3575 a subsequent call to integer_zerop will work. Hence we must
3576 do the type conversion here. At this point, the constant is either
3577 zero or one, and the conversion to a signed type can never overflow.
3578 We could get an overflow if this conversion is done anywhere else. */
3579 if (TREE_UNSIGNED (type
))
3580 temp
= convert (signed_type (type
), temp
);
3582 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
3583 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
3585 temp
= const_binop (BIT_AND_EXPR
, temp
, convert (TREE_TYPE (c
), mask
), 0);
3586 /* If necessary, convert the type back to match the type of C. */
3587 if (TREE_UNSIGNED (type
))
3588 temp
= convert (type
, temp
);
3590 return convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
3593 /* Find ways of folding logical expressions of LHS and RHS:
3594 Try to merge two comparisons to the same innermost item.
3595 Look for range tests like "ch >= '0' && ch <= '9'".
3596 Look for combinations of simple terms on machines with expensive branches
3597 and evaluate the RHS unconditionally.
3599 For example, if we have p->a == 2 && p->b == 4 and we can make an
3600 object large enough to span both A and B, we can do this with a comparison
3601 against the object ANDed with the a mask.
3603 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3604 operations to do this with one comparison.
3606 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3607 function and the one above.
3609 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3610 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3612 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3615 We return the simplified tree or 0 if no optimization is possible. */
3618 fold_truthop (code
, truth_type
, lhs
, rhs
)
3619 enum tree_code code
;
3620 tree truth_type
, lhs
, rhs
;
3622 /* If this is the "or" of two comparisons, we can do something if we
3623 the comparisons are NE_EXPR. If this is the "and", we can do something
3624 if the comparisons are EQ_EXPR. I.e.,
3625 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3627 WANTED_CODE is this operation code. For single bit fields, we can
3628 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3629 comparison for one-bit fields. */
3631 enum tree_code wanted_code
;
3632 enum tree_code lcode
, rcode
;
3633 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
3634 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
3635 int ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
3636 int rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
3637 int xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
3638 int lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
3639 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
3640 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
3641 enum machine_mode lnmode
, rnmode
;
3642 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
3643 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
3644 tree l_const
, r_const
;
3646 int first_bit
, end_bit
;
3649 /* Start by getting the comparison codes. Fail if anything is volatile.
3650 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3651 it were surrounded with a NE_EXPR. */
3653 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
3656 lcode
= TREE_CODE (lhs
);
3657 rcode
= TREE_CODE (rhs
);
3659 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
3660 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
3662 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
3663 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
3665 if (TREE_CODE_CLASS (lcode
) != '<' || TREE_CODE_CLASS (rcode
) != '<')
3668 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
3669 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
3671 ll_arg
= TREE_OPERAND (lhs
, 0);
3672 lr_arg
= TREE_OPERAND (lhs
, 1);
3673 rl_arg
= TREE_OPERAND (rhs
, 0);
3674 rr_arg
= TREE_OPERAND (rhs
, 1);
3676 /* If the RHS can be evaluated unconditionally and its operands are
3677 simple, it wins to evaluate the RHS unconditionally on machines
3678 with expensive branches. In this case, this isn't a comparison
3679 that can be merged. */
3681 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3682 are with zero (tmw). */
3684 if (BRANCH_COST
>= 2
3685 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
3686 && simple_operand_p (rl_arg
)
3687 && simple_operand_p (rr_arg
))
3688 return build (code
, truth_type
, lhs
, rhs
);
3690 /* See if the comparisons can be merged. Then get all the parameters for
3693 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
3694 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
3698 ll_inner
= decode_field_reference (ll_arg
,
3699 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
3700 &ll_unsignedp
, &volatilep
, &ll_mask
,
3702 lr_inner
= decode_field_reference (lr_arg
,
3703 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
3704 &lr_unsignedp
, &volatilep
, &lr_mask
,
3706 rl_inner
= decode_field_reference (rl_arg
,
3707 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
3708 &rl_unsignedp
, &volatilep
, &rl_mask
,
3710 rr_inner
= decode_field_reference (rr_arg
,
3711 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
3712 &rr_unsignedp
, &volatilep
, &rr_mask
,
3715 /* It must be true that the inner operation on the lhs of each
3716 comparison must be the same if we are to be able to do anything.
3717 Then see if we have constants. If not, the same must be true for
3719 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
3720 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
3723 if (TREE_CODE (lr_arg
) == INTEGER_CST
3724 && TREE_CODE (rr_arg
) == INTEGER_CST
)
3725 l_const
= lr_arg
, r_const
= rr_arg
;
3726 else if (lr_inner
== 0 || rr_inner
== 0
3727 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
3730 l_const
= r_const
= 0;
3732 /* If either comparison code is not correct for our logical operation,
3733 fail. However, we can convert a one-bit comparison against zero into
3734 the opposite comparison against that bit being set in the field. */
3736 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
3737 if (lcode
!= wanted_code
)
3739 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
3741 if (ll_unsignedp
|| tree_log2 (ll_mask
) + 1 < ll_bitsize
)
3744 /* Since ll_arg is a single bit bit mask, we can sign extend
3745 it appropriately with a NEGATE_EXPR.
3746 l_const is made a signed value here, but since for l_const != NULL
3747 lr_unsignedp is not used, we don't need to clear the latter. */
3748 l_const
= fold (build1 (NEGATE_EXPR
, TREE_TYPE (ll_arg
),
3749 convert (TREE_TYPE (ll_arg
), ll_mask
)));
3755 if (rcode
!= wanted_code
)
3757 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
3759 if (rl_unsignedp
|| tree_log2 (rl_mask
) + 1 < rl_bitsize
)
3762 /* This is analogous to the code for l_const above. */
3763 r_const
= fold (build1 (NEGATE_EXPR
, TREE_TYPE (rl_arg
),
3764 convert (TREE_TYPE (rl_arg
), rl_mask
)));
3770 /* See if we can find a mode that contains both fields being compared on
3771 the left. If we can't, fail. Otherwise, update all constants and masks
3772 to be relative to a field of that size. */
3773 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
3774 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
3775 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
3776 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
3778 if (lnmode
== VOIDmode
)
3781 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
3782 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
3783 type
= type_for_size (lnbitsize
, 1);
3784 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
3786 if (BYTES_BIG_ENDIAN
)
3788 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
3789 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
3792 ll_mask
= const_binop (LSHIFT_EXPR
, convert (type
, ll_mask
),
3793 size_int (xll_bitpos
), 0);
3794 rl_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rl_mask
),
3795 size_int (xrl_bitpos
), 0);
3799 l_const
= convert (type
, l_const
);
3800 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
3801 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
3802 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
3803 fold (build1 (BIT_NOT_EXPR
,
3807 warning ("comparison is always %s",
3808 wanted_code
== NE_EXPR
? "one" : "zero");
3810 return convert (truth_type
,
3811 wanted_code
== NE_EXPR
3812 ? integer_one_node
: integer_zero_node
);
3817 r_const
= convert (type
, r_const
);
3818 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
3819 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
3820 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
3821 fold (build1 (BIT_NOT_EXPR
,
3825 warning ("comparison is always %s",
3826 wanted_code
== NE_EXPR
? "one" : "zero");
3828 return convert (truth_type
,
3829 wanted_code
== NE_EXPR
3830 ? integer_one_node
: integer_zero_node
);
3834 /* If the right sides are not constant, do the same for it. Also,
3835 disallow this optimization if a size or signedness mismatch occurs
3836 between the left and right sides. */
3839 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
3840 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
3841 /* Make sure the two fields on the right
3842 correspond to the left without being swapped. */
3843 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
3846 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
3847 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
3848 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
3849 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
3851 if (rnmode
== VOIDmode
)
3854 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
3855 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
3856 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
3858 if (BYTES_BIG_ENDIAN
)
3860 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
3861 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
3864 lr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, lr_mask
),
3865 size_int (xlr_bitpos
), 0);
3866 rr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rr_mask
),
3867 size_int (xrr_bitpos
), 0);
3869 /* Make a mask that corresponds to both fields being compared.
3870 Do this for both items being compared. If the masks agree,
3871 we can do this by masking both and comparing the masked
3873 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
3874 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
3875 if (operand_equal_p (ll_mask
, lr_mask
, 0) && lnbitsize
== rnbitsize
)
3877 lhs
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
3878 ll_unsignedp
|| rl_unsignedp
);
3879 rhs
= make_bit_field_ref (lr_inner
, type
, rnbitsize
, rnbitpos
,
3880 lr_unsignedp
|| rr_unsignedp
);
3881 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
3883 lhs
= build (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
3884 rhs
= build (BIT_AND_EXPR
, type
, rhs
, ll_mask
);
3886 return build (wanted_code
, truth_type
, lhs
, rhs
);
3889 /* There is still another way we can do something: If both pairs of
3890 fields being compared are adjacent, we may be able to make a wider
3891 field containing them both. */
3892 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
3893 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
3894 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
3895 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
3896 return build (wanted_code
, truth_type
,
3897 make_bit_field_ref (ll_inner
, type
,
3898 ll_bitsize
+ rl_bitsize
,
3899 MIN (ll_bitpos
, rl_bitpos
),
3901 make_bit_field_ref (lr_inner
, type
,
3902 lr_bitsize
+ rr_bitsize
,
3903 MIN (lr_bitpos
, rr_bitpos
),
3909 /* Handle the case of comparisons with constants. If there is something in
3910 common between the masks, those bits of the constants must be the same.
3911 If not, the condition is always false. Test for this to avoid generating
3912 incorrect code below. */
3913 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
3914 if (! integer_zerop (result
)
3915 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
3916 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
3918 if (wanted_code
== NE_EXPR
)
3920 warning ("`or' of unmatched not-equal tests is always 1");
3921 return convert (truth_type
, integer_one_node
);
3925 warning ("`and' of mutually exclusive equal-tests is always zero");
3926 return convert (truth_type
, integer_zero_node
);
3930 /* Construct the expression we will return. First get the component
3931 reference we will make. Unless the mask is all ones the width of
3932 that field, perform the mask operation. Then compare with the
3934 result
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
3935 ll_unsignedp
|| rl_unsignedp
);
3937 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
3938 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
3939 result
= build (BIT_AND_EXPR
, type
, result
, ll_mask
);
3941 return build (wanted_code
, truth_type
, result
,
3942 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
3945 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3946 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3947 that we may sometimes modify the tree. */
3950 strip_compound_expr (t
, s
)
3954 enum tree_code code
= TREE_CODE (t
);
3956 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3957 if (code
== COMPOUND_EXPR
&& TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
3958 && TREE_OPERAND (TREE_OPERAND (t
, 0), 0) == s
)
3959 return TREE_OPERAND (t
, 1);
3961 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3962 don't bother handling any other types. */
3963 else if (code
== COND_EXPR
)
3965 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3966 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3967 TREE_OPERAND (t
, 2) = strip_compound_expr (TREE_OPERAND (t
, 2), s
);
3969 else if (TREE_CODE_CLASS (code
) == '1')
3970 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3971 else if (TREE_CODE_CLASS (code
) == '<'
3972 || TREE_CODE_CLASS (code
) == '2')
3974 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3975 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3981 /* Return a node which has the indicated constant VALUE (either 0 or
3982 1), and is of the indicated TYPE. */
3985 constant_boolean_node (value
, type
)
3989 if (type
== integer_type_node
)
3990 return value
? integer_one_node
: integer_zero_node
;
3991 else if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3992 return truthvalue_conversion (value
? integer_one_node
:
3996 tree t
= build_int_2 (value
, 0);
3997 TREE_TYPE (t
) = type
;
4002 /* Perform constant folding and related simplification of EXPR.
4003 The related simplifications include x*1 => x, x*0 => 0, etc.,
4004 and application of the associative law.
4005 NOP_EXPR conversions may be removed freely (as long as we
4006 are careful not to change the C type of the overall expression)
4007 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4008 but we can constant-fold them if they have constant operands. */
4014 register tree t
= expr
;
4015 tree t1
= NULL_TREE
;
4017 tree type
= TREE_TYPE (expr
);
4018 register tree arg0
= NULL_TREE
, arg1
= NULL_TREE
;
4019 register enum tree_code code
= TREE_CODE (t
);
4023 /* WINS will be nonzero when the switch is done
4024 if all operands are constant. */
4028 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4029 Likewise for a SAVE_EXPR that's already been evaluated. */
4030 if (code
== RTL_EXPR
|| (code
== SAVE_EXPR
&& SAVE_EXPR_RTL (t
)) != 0)
4033 /* Return right away if already constant. */
4034 if (TREE_CONSTANT (t
))
4036 if (code
== CONST_DECL
)
4037 return DECL_INITIAL (t
);
4041 #ifdef MAX_INTEGER_COMPUTATION_MODE
4042 check_max_integer_computation_mode (expr
);
4045 kind
= TREE_CODE_CLASS (code
);
4046 if (code
== NOP_EXPR
|| code
== FLOAT_EXPR
|| code
== CONVERT_EXPR
)
4050 /* Special case for conversion ops that can have fixed point args. */
4051 arg0
= TREE_OPERAND (t
, 0);
4053 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4055 STRIP_TYPE_NOPS (arg0
);
4057 if (arg0
!= 0 && TREE_CODE (arg0
) == COMPLEX_CST
)
4058 subop
= TREE_REALPART (arg0
);
4062 if (subop
!= 0 && TREE_CODE (subop
) != INTEGER_CST
4063 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4064 && TREE_CODE (subop
) != REAL_CST
4065 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4067 /* Note that TREE_CONSTANT isn't enough:
4068 static var addresses are constant but we can't
4069 do arithmetic on them. */
4072 else if (kind
== 'e' || kind
== '<'
4073 || kind
== '1' || kind
== '2' || kind
== 'r')
4075 register int len
= tree_code_length
[(int) code
];
4077 for (i
= 0; i
< len
; i
++)
4079 tree op
= TREE_OPERAND (t
, i
);
4083 continue; /* Valid for CALL_EXPR, at least. */
4085 if (kind
== '<' || code
== RSHIFT_EXPR
)
4087 /* Signedness matters here. Perhaps we can refine this
4089 STRIP_TYPE_NOPS (op
);
4093 /* Strip any conversions that don't change the mode. */
4097 if (TREE_CODE (op
) == COMPLEX_CST
)
4098 subop
= TREE_REALPART (op
);
4102 if (TREE_CODE (subop
) != INTEGER_CST
4103 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4104 && TREE_CODE (subop
) != REAL_CST
4105 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4107 /* Note that TREE_CONSTANT isn't enough:
4108 static var addresses are constant but we can't
4109 do arithmetic on them. */
4119 /* If this is a commutative operation, and ARG0 is a constant, move it
4120 to ARG1 to reduce the number of tests below. */
4121 if ((code
== PLUS_EXPR
|| code
== MULT_EXPR
|| code
== MIN_EXPR
4122 || code
== MAX_EXPR
|| code
== BIT_IOR_EXPR
|| code
== BIT_XOR_EXPR
4123 || code
== BIT_AND_EXPR
)
4124 && (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
))
4126 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
4128 tem
= TREE_OPERAND (t
, 0); TREE_OPERAND (t
, 0) = TREE_OPERAND (t
, 1);
4129 TREE_OPERAND (t
, 1) = tem
;
4132 /* Now WINS is set as described above,
4133 ARG0 is the first operand of EXPR,
4134 and ARG1 is the second operand (if it has more than one operand).
4136 First check for cases where an arithmetic operation is applied to a
4137 compound, conditional, or comparison operation. Push the arithmetic
4138 operation inside the compound or conditional to see if any folding
4139 can then be done. Convert comparison to conditional for this purpose.
4140 The also optimizes non-constant cases that used to be done in
4143 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
4144 one of the operands is a comparison and the other is a comparison, a
4145 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4146 code below would make the expression more complex. Change it to a
4147 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4148 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4150 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
4151 || code
== EQ_EXPR
|| code
== NE_EXPR
)
4152 && ((truth_value_p (TREE_CODE (arg0
))
4153 && (truth_value_p (TREE_CODE (arg1
))
4154 || (TREE_CODE (arg1
) == BIT_AND_EXPR
4155 && integer_onep (TREE_OPERAND (arg1
, 1)))))
4156 || (truth_value_p (TREE_CODE (arg1
))
4157 && (truth_value_p (TREE_CODE (arg0
))
4158 || (TREE_CODE (arg0
) == BIT_AND_EXPR
4159 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
4161 t
= fold (build (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
4162 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
4166 if (code
== EQ_EXPR
)
4167 t
= invert_truthvalue (t
);
4172 if (TREE_CODE_CLASS (code
) == '1')
4174 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
4175 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
4176 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))));
4177 else if (TREE_CODE (arg0
) == COND_EXPR
)
4179 t
= fold (build (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
4180 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))),
4181 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 2)))));
4183 /* If this was a conversion, and all we did was to move into
4184 inside the COND_EXPR, bring it back out. But leave it if
4185 it is a conversion from integer to integer and the
4186 result precision is no wider than a word since such a
4187 conversion is cheap and may be optimized away by combine,
4188 while it couldn't if it were outside the COND_EXPR. Then return
4189 so we don't get into an infinite recursion loop taking the
4190 conversion out and then back in. */
4192 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
4193 || code
== NON_LVALUE_EXPR
)
4194 && TREE_CODE (t
) == COND_EXPR
4195 && TREE_CODE (TREE_OPERAND (t
, 1)) == code
4196 && TREE_CODE (TREE_OPERAND (t
, 2)) == code
4197 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0))
4198 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 2), 0)))
4199 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t
))
4200 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)))
4201 && TYPE_PRECISION (TREE_TYPE (t
)) <= BITS_PER_WORD
))
4202 t
= build1 (code
, type
,
4204 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)),
4205 TREE_OPERAND (t
, 0),
4206 TREE_OPERAND (TREE_OPERAND (t
, 1), 0),
4207 TREE_OPERAND (TREE_OPERAND (t
, 2), 0)));
4210 else if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
4211 return fold (build (COND_EXPR
, type
, arg0
,
4212 fold (build1 (code
, type
, integer_one_node
)),
4213 fold (build1 (code
, type
, integer_zero_node
))));
4215 else if (TREE_CODE_CLASS (code
) == '2'
4216 || TREE_CODE_CLASS (code
) == '<')
4218 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
4219 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
4220 fold (build (code
, type
,
4221 arg0
, TREE_OPERAND (arg1
, 1))));
4222 else if ((TREE_CODE (arg1
) == COND_EXPR
4223 || (TREE_CODE_CLASS (TREE_CODE (arg1
)) == '<'
4224 && TREE_CODE_CLASS (code
) != '<'))
4225 && (! TREE_SIDE_EFFECTS (arg0
)
4226 || (current_function_decl
!= 0
4227 && ! contains_placeholder_p (arg0
))))
4229 tree test
, true_value
, false_value
;
4230 tree lhs
= 0, rhs
= 0;
4232 if (TREE_CODE (arg1
) == COND_EXPR
)
4234 test
= TREE_OPERAND (arg1
, 0);
4235 true_value
= TREE_OPERAND (arg1
, 1);
4236 false_value
= TREE_OPERAND (arg1
, 2);
4240 tree testtype
= TREE_TYPE (arg1
);
4242 true_value
= convert (testtype
, integer_one_node
);
4243 false_value
= convert (testtype
, integer_zero_node
);
4246 /* If ARG0 is complex we want to make sure we only evaluate
4247 it once. Though this is only required if it is volatile, it
4248 might be more efficient even if it is not. However, if we
4249 succeed in folding one part to a constant, we do not need
4250 to make this SAVE_EXPR. Since we do this optimization
4251 primarily to see if we do end up with constant and this
4252 SAVE_EXPR interferes with later optimizations, suppressing
4253 it when we can is important.
4255 If we are not in a function, we can't make a SAVE_EXPR, so don't
4256 try to do so. Don't try to see if the result is a constant
4257 if an arm is a COND_EXPR since we get exponential behavior
4260 if (TREE_CODE (arg0
) != SAVE_EXPR
&& ! TREE_CONSTANT (arg0
)
4261 && current_function_decl
!= 0
4262 && ((TREE_CODE (arg0
) != VAR_DECL
4263 && TREE_CODE (arg0
) != PARM_DECL
)
4264 || TREE_SIDE_EFFECTS (arg0
)))
4266 if (TREE_CODE (true_value
) != COND_EXPR
)
4267 lhs
= fold (build (code
, type
, arg0
, true_value
));
4269 if (TREE_CODE (false_value
) != COND_EXPR
)
4270 rhs
= fold (build (code
, type
, arg0
, false_value
));
4272 if ((lhs
== 0 || ! TREE_CONSTANT (lhs
))
4273 && (rhs
== 0 || !TREE_CONSTANT (rhs
)))
4274 arg0
= save_expr (arg0
), lhs
= rhs
= 0;
4278 lhs
= fold (build (code
, type
, arg0
, true_value
));
4280 rhs
= fold (build (code
, type
, arg0
, false_value
));
4282 test
= fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
4284 if (TREE_CODE (arg0
) == SAVE_EXPR
)
4285 return build (COMPOUND_EXPR
, type
,
4286 convert (void_type_node
, arg0
),
4287 strip_compound_expr (test
, arg0
));
4289 return convert (type
, test
);
4292 else if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
4293 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
4294 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
4295 else if ((TREE_CODE (arg0
) == COND_EXPR
4296 || (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
4297 && TREE_CODE_CLASS (code
) != '<'))
4298 && (! TREE_SIDE_EFFECTS (arg1
)
4299 || (current_function_decl
!= 0
4300 && ! contains_placeholder_p (arg1
))))
4302 tree test
, true_value
, false_value
;
4303 tree lhs
= 0, rhs
= 0;
4305 if (TREE_CODE (arg0
) == COND_EXPR
)
4307 test
= TREE_OPERAND (arg0
, 0);
4308 true_value
= TREE_OPERAND (arg0
, 1);
4309 false_value
= TREE_OPERAND (arg0
, 2);
4313 tree testtype
= TREE_TYPE (arg0
);
4315 true_value
= convert (testtype
, integer_one_node
);
4316 false_value
= convert (testtype
, integer_zero_node
);
4319 if (TREE_CODE (arg1
) != SAVE_EXPR
&& ! TREE_CONSTANT (arg0
)
4320 && current_function_decl
!= 0
4321 && ((TREE_CODE (arg1
) != VAR_DECL
4322 && TREE_CODE (arg1
) != PARM_DECL
)
4323 || TREE_SIDE_EFFECTS (arg1
)))
4325 if (TREE_CODE (true_value
) != COND_EXPR
)
4326 lhs
= fold (build (code
, type
, true_value
, arg1
));
4328 if (TREE_CODE (false_value
) != COND_EXPR
)
4329 rhs
= fold (build (code
, type
, false_value
, arg1
));
4331 if ((lhs
== 0 || ! TREE_CONSTANT (lhs
))
4332 && (rhs
== 0 || !TREE_CONSTANT (rhs
)))
4333 arg1
= save_expr (arg1
), lhs
= rhs
= 0;
4337 lhs
= fold (build (code
, type
, true_value
, arg1
));
4340 rhs
= fold (build (code
, type
, false_value
, arg1
));
4342 test
= fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
4343 if (TREE_CODE (arg1
) == SAVE_EXPR
)
4344 return build (COMPOUND_EXPR
, type
,
4345 convert (void_type_node
, arg1
),
4346 strip_compound_expr (test
, arg1
));
4348 return convert (type
, test
);
4351 else if (TREE_CODE_CLASS (code
) == '<'
4352 && TREE_CODE (arg0
) == COMPOUND_EXPR
)
4353 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
4354 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
4355 else if (TREE_CODE_CLASS (code
) == '<'
4356 && TREE_CODE (arg1
) == COMPOUND_EXPR
)
4357 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
4358 fold (build (code
, type
, arg0
, TREE_OPERAND (arg1
, 1))));
4370 return fold (DECL_INITIAL (t
));
4375 case FIX_TRUNC_EXPR
:
4376 /* Other kinds of FIX are not handled properly by fold_convert. */
4378 if (TREE_TYPE (TREE_OPERAND (t
, 0)) == TREE_TYPE (t
))
4379 return TREE_OPERAND (t
, 0);
4381 /* Handle cases of two conversions in a row. */
4382 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
4383 || TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
)
4385 tree inside_type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
4386 tree inter_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
4387 tree final_type
= TREE_TYPE (t
);
4388 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
4389 int inside_ptr
= POINTER_TYPE_P (inside_type
);
4390 int inside_float
= FLOAT_TYPE_P (inside_type
);
4391 int inside_prec
= TYPE_PRECISION (inside_type
);
4392 int inside_unsignedp
= TREE_UNSIGNED (inside_type
);
4393 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
4394 int inter_ptr
= POINTER_TYPE_P (inter_type
);
4395 int inter_float
= FLOAT_TYPE_P (inter_type
);
4396 int inter_prec
= TYPE_PRECISION (inter_type
);
4397 int inter_unsignedp
= TREE_UNSIGNED (inter_type
);
4398 int final_int
= INTEGRAL_TYPE_P (final_type
);
4399 int final_ptr
= POINTER_TYPE_P (final_type
);
4400 int final_float
= FLOAT_TYPE_P (final_type
);
4401 int final_prec
= TYPE_PRECISION (final_type
);
4402 int final_unsignedp
= TREE_UNSIGNED (final_type
);
4404 /* In addition to the cases of two conversions in a row
4405 handled below, if we are converting something to its own
4406 type via an object of identical or wider precision, neither
4407 conversion is needed. */
4408 if (inside_type
== final_type
4409 && ((inter_int
&& final_int
) || (inter_float
&& final_float
))
4410 && inter_prec
>= final_prec
)
4411 return TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
4413 /* Likewise, if the intermediate and final types are either both
4414 float or both integer, we don't need the middle conversion if
4415 it is wider than the final type and doesn't change the signedness
4416 (for integers). Avoid this if the final type is a pointer
4417 since then we sometimes need the inner conversion. Likewise if
4418 the outer has a precision not equal to the size of its mode. */
4419 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
4420 || (inter_float
&& inside_float
))
4421 && inter_prec
>= inside_prec
4422 && (inter_float
|| inter_unsignedp
== inside_unsignedp
)
4423 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (final_type
))
4424 && TYPE_MODE (final_type
) == TYPE_MODE (inter_type
))
4426 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
4428 /* If we have a sign-extension of a zero-extended value, we can
4429 replace that by a single zero-extension. */
4430 if (inside_int
&& inter_int
&& final_int
4431 && inside_prec
< inter_prec
&& inter_prec
< final_prec
4432 && inside_unsignedp
&& !inter_unsignedp
)
4433 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
4435 /* Two conversions in a row are not needed unless:
4436 - some conversion is floating-point (overstrict for now), or
4437 - the intermediate type is narrower than both initial and
4439 - the intermediate type and innermost type differ in signedness,
4440 and the outermost type is wider than the intermediate, or
4441 - the initial type is a pointer type and the precisions of the
4442 intermediate and final types differ, or
4443 - the final type is a pointer type and the precisions of the
4444 initial and intermediate types differ. */
4445 if (! inside_float
&& ! inter_float
&& ! final_float
4446 && (inter_prec
> inside_prec
|| inter_prec
> final_prec
)
4447 && ! (inside_int
&& inter_int
4448 && inter_unsignedp
!= inside_unsignedp
4449 && inter_prec
< final_prec
)
4450 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
4451 == (final_unsignedp
&& final_prec
> inter_prec
))
4452 && ! (inside_ptr
&& inter_prec
!= final_prec
)
4453 && ! (final_ptr
&& inside_prec
!= inter_prec
)
4454 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (final_type
))
4455 && TYPE_MODE (final_type
) == TYPE_MODE (inter_type
))
4457 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
4460 if (TREE_CODE (TREE_OPERAND (t
, 0)) == MODIFY_EXPR
4461 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t
, 0), 1))
4462 /* Detect assigning a bitfield. */
4463 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == COMPONENT_REF
4464 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 1))))
4466 /* Don't leave an assignment inside a conversion
4467 unless assigning a bitfield. */
4468 tree prev
= TREE_OPERAND (t
, 0);
4469 TREE_OPERAND (t
, 0) = TREE_OPERAND (prev
, 1);
4470 /* First do the assignment, then return converted constant. */
4471 t
= build (COMPOUND_EXPR
, TREE_TYPE (t
), prev
, fold (t
));
4477 TREE_CONSTANT (t
) = TREE_CONSTANT (arg0
);
4480 return fold_convert (t
, arg0
);
4482 #if 0 /* This loses on &"foo"[0]. */
4487 /* Fold an expression like: "foo"[2] */
4488 if (TREE_CODE (arg0
) == STRING_CST
4489 && TREE_CODE (arg1
) == INTEGER_CST
4490 && !TREE_INT_CST_HIGH (arg1
)
4491 && (i
= TREE_INT_CST_LOW (arg1
)) < TREE_STRING_LENGTH (arg0
))
4493 t
= build_int_2 (TREE_STRING_POINTER (arg0
)[i
], 0);
4494 TREE_TYPE (t
) = TREE_TYPE (TREE_TYPE (arg0
));
4495 force_fit_type (t
, 0);
4502 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
4504 tree m
= purpose_member (arg1
, CONSTRUCTOR_ELTS (arg0
));
4511 TREE_CONSTANT (t
) = wins
;
4517 if (TREE_CODE (arg0
) == INTEGER_CST
)
4519 HOST_WIDE_INT low
, high
;
4520 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
4521 TREE_INT_CST_HIGH (arg0
),
4523 t
= build_int_2 (low
, high
);
4524 TREE_TYPE (t
) = type
;
4526 = (TREE_OVERFLOW (arg0
)
4527 | force_fit_type (t
, overflow
&& !TREE_UNSIGNED (type
)));
4528 TREE_CONSTANT_OVERFLOW (t
)
4529 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
4531 else if (TREE_CODE (arg0
) == REAL_CST
)
4532 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
4534 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
4535 return TREE_OPERAND (arg0
, 0);
4537 /* Convert - (a - b) to (b - a) for non-floating-point. */
4538 else if (TREE_CODE (arg0
) == MINUS_EXPR
&& ! FLOAT_TYPE_P (type
))
4539 return build (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 1),
4540 TREE_OPERAND (arg0
, 0));
4547 if (TREE_CODE (arg0
) == INTEGER_CST
)
4549 if (! TREE_UNSIGNED (type
)
4550 && TREE_INT_CST_HIGH (arg0
) < 0)
4552 HOST_WIDE_INT low
, high
;
4553 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
4554 TREE_INT_CST_HIGH (arg0
),
4556 t
= build_int_2 (low
, high
);
4557 TREE_TYPE (t
) = type
;
4559 = (TREE_OVERFLOW (arg0
)
4560 | force_fit_type (t
, overflow
));
4561 TREE_CONSTANT_OVERFLOW (t
)
4562 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
4565 else if (TREE_CODE (arg0
) == REAL_CST
)
4567 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
4568 t
= build_real (type
,
4569 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
4572 else if (TREE_CODE (arg0
) == ABS_EXPR
|| TREE_CODE (arg0
) == NEGATE_EXPR
)
4573 return build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
4577 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
4579 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
4580 return build (COMPLEX_EXPR
, TREE_TYPE (arg0
),
4581 TREE_OPERAND (arg0
, 0),
4582 fold (build1 (NEGATE_EXPR
,
4583 TREE_TYPE (TREE_TYPE (arg0
)),
4584 TREE_OPERAND (arg0
, 1))));
4585 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
4586 return build_complex (type
, TREE_OPERAND (arg0
, 0),
4587 fold (build1 (NEGATE_EXPR
,
4588 TREE_TYPE (TREE_TYPE (arg0
)),
4589 TREE_OPERAND (arg0
, 1))));
4590 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
4591 return fold (build (TREE_CODE (arg0
), type
,
4592 fold (build1 (CONJ_EXPR
, type
,
4593 TREE_OPERAND (arg0
, 0))),
4594 fold (build1 (CONJ_EXPR
,
4595 type
, TREE_OPERAND (arg0
, 1)))));
4596 else if (TREE_CODE (arg0
) == CONJ_EXPR
)
4597 return TREE_OPERAND (arg0
, 0);
4603 t
= build_int_2 (~ TREE_INT_CST_LOW (arg0
),
4604 ~ TREE_INT_CST_HIGH (arg0
));
4605 TREE_TYPE (t
) = type
;
4606 force_fit_type (t
, 0);
4607 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg0
);
4608 TREE_CONSTANT_OVERFLOW (t
) = TREE_CONSTANT_OVERFLOW (arg0
);
4610 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
4611 return TREE_OPERAND (arg0
, 0);
4615 /* A + (-B) -> A - B */
4616 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
4617 return fold (build (MINUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
4618 else if (! FLOAT_TYPE_P (type
))
4620 if (integer_zerop (arg1
))
4621 return non_lvalue (convert (type
, arg0
));
4623 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4624 with a constant, and the two constants have no bits in common,
4625 we should treat this as a BIT_IOR_EXPR since this may produce more
4627 if (TREE_CODE (arg0
) == BIT_AND_EXPR
4628 && TREE_CODE (arg1
) == BIT_AND_EXPR
4629 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
4630 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
4631 && integer_zerop (const_binop (BIT_AND_EXPR
,
4632 TREE_OPERAND (arg0
, 1),
4633 TREE_OPERAND (arg1
, 1), 0)))
4635 code
= BIT_IOR_EXPR
;
4639 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
)
4641 tree arg00
, arg01
, arg10
, arg11
;
4642 tree alt0
, alt1
, same
;
4644 /* (A * C) + (B * C) -> (A+B) * C.
4645 We are most concerned about the case where C is a constant,
4646 but other combinations show up during loop reduction. Since
4647 it is not difficult, try all four possibilities. */
4649 arg00
= TREE_OPERAND (arg0
, 0);
4650 arg01
= TREE_OPERAND (arg0
, 1);
4651 arg10
= TREE_OPERAND (arg1
, 0);
4652 arg11
= TREE_OPERAND (arg1
, 1);
4655 if (operand_equal_p (arg01
, arg11
, 0))
4656 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
4657 else if (operand_equal_p (arg00
, arg10
, 0))
4658 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
4659 else if (operand_equal_p (arg00
, arg11
, 0))
4660 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
4661 else if (operand_equal_p (arg01
, arg10
, 0))
4662 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
4665 return fold (build (MULT_EXPR
, type
,
4666 fold (build (PLUS_EXPR
, type
, alt0
, alt1
)),
4670 /* In IEEE floating point, x+0 may not equal x. */
4671 else if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
4673 && real_zerop (arg1
))
4674 return non_lvalue (convert (type
, arg0
));
4676 /* In most languages, can't associate operations on floats
4677 through parentheses. Rather than remember where the parentheses
4678 were, we don't associate floats at all. It shouldn't matter much.
4679 However, associating multiplications is only very slightly
4680 inaccurate, so do that if -ffast-math is specified. */
4681 if (FLOAT_TYPE_P (type
)
4682 && ! (flag_fast_math
&& code
== MULT_EXPR
))
4685 /* The varsign == -1 cases happen only for addition and subtraction.
4686 It says that the arg that was split was really CON minus VAR.
4687 The rest of the code applies to all associative operations. */
4693 if (split_tree (arg0
, code
, &var
, &con
, &varsign
))
4697 /* EXPR is (CON-VAR) +- ARG1. */
4698 /* If it is + and VAR==ARG1, return just CONST. */
4699 if (code
== PLUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
4700 return convert (TREE_TYPE (t
), con
);
4702 /* If ARG0 is a constant, don't change things around;
4703 instead keep all the constant computations together. */
4705 if (TREE_CONSTANT (arg0
))
4708 /* Otherwise return (CON +- ARG1) - VAR. */
4709 t
= build (MINUS_EXPR
, type
,
4710 fold (build (code
, type
, con
, arg1
)), var
);
4714 /* EXPR is (VAR+CON) +- ARG1. */
4715 /* If it is - and VAR==ARG1, return just CONST. */
4716 if (code
== MINUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
4717 return convert (TREE_TYPE (t
), con
);
4719 /* If ARG0 is a constant, don't change things around;
4720 instead keep all the constant computations together. */
4722 if (TREE_CONSTANT (arg0
))
4725 /* Otherwise return VAR +- (ARG1 +- CON). */
4726 tem
= fold (build (code
, type
, arg1
, con
));
4727 t
= build (code
, type
, var
, tem
);
4729 if (integer_zerop (tem
)
4730 && (code
== PLUS_EXPR
|| code
== MINUS_EXPR
))
4731 return convert (type
, var
);
4732 /* If we have x +/- (c - d) [c an explicit integer]
4733 change it to x -/+ (d - c) since if d is relocatable
4734 then the latter can be a single immediate insn
4735 and the former cannot. */
4736 if (TREE_CODE (tem
) == MINUS_EXPR
4737 && TREE_CODE (TREE_OPERAND (tem
, 0)) == INTEGER_CST
)
4739 tree tem1
= TREE_OPERAND (tem
, 1);
4740 TREE_OPERAND (tem
, 1) = TREE_OPERAND (tem
, 0);
4741 TREE_OPERAND (tem
, 0) = tem1
;
4743 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
4749 if (split_tree (arg1
, code
, &var
, &con
, &varsign
))
4751 if (TREE_CONSTANT (arg1
))
4756 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
4758 /* EXPR is ARG0 +- (CON +- VAR). */
4759 if (TREE_CODE (t
) == MINUS_EXPR
4760 && operand_equal_p (var
, arg0
, 0))
4762 /* If VAR and ARG0 cancel, return just CON or -CON. */
4763 if (code
== PLUS_EXPR
)
4764 return convert (TREE_TYPE (t
), con
);
4765 return fold (build1 (NEGATE_EXPR
, TREE_TYPE (t
),
4766 convert (TREE_TYPE (t
), con
)));
4769 t
= build (TREE_CODE (t
), type
,
4770 fold (build (code
, TREE_TYPE (t
), arg0
, con
)), var
);
4772 if (integer_zerop (TREE_OPERAND (t
, 0))
4773 && TREE_CODE (t
) == PLUS_EXPR
)
4774 return convert (TREE_TYPE (t
), var
);
4779 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4780 if (TREE_CODE (arg1
) == REAL_CST
)
4782 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4784 t1
= const_binop (code
, arg0
, arg1
, 0);
4785 if (t1
!= NULL_TREE
)
4787 /* The return value should always have
4788 the same type as the original expression. */
4789 if (TREE_TYPE (t1
) != TREE_TYPE (t
))
4790 t1
= convert (TREE_TYPE (t
), t1
);
4797 if (! FLOAT_TYPE_P (type
))
4799 if (! wins
&& integer_zerop (arg0
))
4800 return build1 (NEGATE_EXPR
, type
, arg1
);
4801 if (integer_zerop (arg1
))
4802 return non_lvalue (convert (type
, arg0
));
4804 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4805 about the case where C is a constant, just try one of the
4806 four possibilities. */
4808 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
4809 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4810 TREE_OPERAND (arg1
, 1), 0))
4811 return fold (build (MULT_EXPR
, type
,
4812 fold (build (MINUS_EXPR
, type
,
4813 TREE_OPERAND (arg0
, 0),
4814 TREE_OPERAND (arg1
, 0))),
4815 TREE_OPERAND (arg0
, 1)));
4817 /* Convert A - (-B) to A + B. */
4818 else if (TREE_CODE (arg1
) == NEGATE_EXPR
)
4819 return fold (build (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
4821 else if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
4824 /* Except with IEEE floating point, 0-x equals -x. */
4825 if (! wins
&& real_zerop (arg0
))
4826 return build1 (NEGATE_EXPR
, type
, arg1
);
4827 /* Except with IEEE floating point, x-0 equals x. */
4828 if (real_zerop (arg1
))
4829 return non_lvalue (convert (type
, arg0
));
4832 /* Fold &x - &x. This can happen from &x.foo - &x.
4833 This is unsafe for certain floats even in non-IEEE formats.
4834 In IEEE, it is unsafe because it does wrong for NaNs.
4835 Also note that operand_equal_p is always false if an operand
4838 if ((! FLOAT_TYPE_P (type
) || flag_fast_math
)
4839 && operand_equal_p (arg0
, arg1
, 0))
4840 return convert (type
, integer_zero_node
);
4845 if (! FLOAT_TYPE_P (type
))
4847 if (integer_zerop (arg1
))
4848 return omit_one_operand (type
, arg1
, arg0
);
4849 if (integer_onep (arg1
))
4850 return non_lvalue (convert (type
, arg0
));
4852 /* ((A / C) * C) is A if the division is an
4853 EXACT_DIV_EXPR. Since C is normally a constant,
4854 just check for one of the four possibilities. */
4856 if (TREE_CODE (arg0
) == EXACT_DIV_EXPR
4857 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
4858 return TREE_OPERAND (arg0
, 0);
4860 /* (a * (1 << b)) is (a << b) */
4861 if (TREE_CODE (arg1
) == LSHIFT_EXPR
4862 && integer_onep (TREE_OPERAND (arg1
, 0)))
4863 return fold (build (LSHIFT_EXPR
, type
, arg0
,
4864 TREE_OPERAND (arg1
, 1)));
4865 if (TREE_CODE (arg0
) == LSHIFT_EXPR
4866 && integer_onep (TREE_OPERAND (arg0
, 0)))
4867 return fold (build (LSHIFT_EXPR
, type
, arg1
,
4868 TREE_OPERAND (arg0
, 1)));
4872 /* x*0 is 0, except for IEEE floating point. */
4873 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
4875 && real_zerop (arg1
))
4876 return omit_one_operand (type
, arg1
, arg0
);
4877 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4878 However, ANSI says we can drop signals,
4879 so we can do this anyway. */
4880 if (real_onep (arg1
))
4881 return non_lvalue (convert (type
, arg0
));
4883 if (! wins
&& real_twop (arg1
) && current_function_decl
!= 0
4884 && ! contains_placeholder_p (arg0
))
4886 tree arg
= save_expr (arg0
);
4887 return build (PLUS_EXPR
, type
, arg
, arg
);
4895 register enum tree_code code0
, code1
;
4897 if (integer_all_onesp (arg1
))
4898 return omit_one_operand (type
, arg1
, arg0
);
4899 if (integer_zerop (arg1
))
4900 return non_lvalue (convert (type
, arg0
));
4901 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
4902 if (t1
!= NULL_TREE
)
4905 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
4906 is a rotate of A by C1 bits. */
4907 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
4908 is a rotate of A by B bits. */
4910 code0
= TREE_CODE (arg0
);
4911 code1
= TREE_CODE (arg1
);
4912 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
4913 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
4914 && operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
,0), 0)
4915 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
4917 register tree tree01
, tree11
;
4918 register enum tree_code code01
, code11
;
4920 tree01
= TREE_OPERAND (arg0
, 1);
4921 tree11
= TREE_OPERAND (arg1
, 1);
4922 code01
= TREE_CODE (tree01
);
4923 code11
= TREE_CODE (tree11
);
4924 if (code01
== INTEGER_CST
4925 && code11
== INTEGER_CST
4926 && TREE_INT_CST_HIGH (tree01
) == 0
4927 && TREE_INT_CST_HIGH (tree11
) == 0
4928 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
4929 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
4930 return build (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
4931 code0
== LSHIFT_EXPR
? tree01
: tree11
);
4932 else if (code11
== MINUS_EXPR
4933 && TREE_CODE (TREE_OPERAND (tree11
, 0)) == INTEGER_CST
4934 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11
, 0)) == 0
4935 && TREE_INT_CST_LOW (TREE_OPERAND (tree11
, 0))
4936 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
4937 && operand_equal_p (tree01
, TREE_OPERAND (tree11
, 1), 0))
4938 return build (code0
== LSHIFT_EXPR
? LROTATE_EXPR
: RROTATE_EXPR
,
4939 type
, TREE_OPERAND (arg0
, 0), tree01
);
4940 else if (code01
== MINUS_EXPR
4941 && TREE_CODE (TREE_OPERAND (tree01
, 0)) == INTEGER_CST
4942 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01
, 0)) == 0
4943 && TREE_INT_CST_LOW (TREE_OPERAND (tree01
, 0))
4944 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
4945 && operand_equal_p (tree11
, TREE_OPERAND (tree01
, 1), 0))
4946 return build (code0
!= LSHIFT_EXPR
? LROTATE_EXPR
: RROTATE_EXPR
,
4947 type
, TREE_OPERAND (arg0
, 0), tree11
);
4954 if (integer_zerop (arg1
))
4955 return non_lvalue (convert (type
, arg0
));
4956 if (integer_all_onesp (arg1
))
4957 return fold (build1 (BIT_NOT_EXPR
, type
, arg0
));
4962 if (integer_all_onesp (arg1
))
4963 return non_lvalue (convert (type
, arg0
));
4964 if (integer_zerop (arg1
))
4965 return omit_one_operand (type
, arg1
, arg0
);
4966 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
4967 if (t1
!= NULL_TREE
)
4969 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
4970 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == NOP_EXPR
4971 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
4973 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)));
4974 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
4975 && (~TREE_INT_CST_LOW (arg0
)
4976 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
4977 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg1
, 0));
4979 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
4980 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
4982 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
4983 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
4984 && (~TREE_INT_CST_LOW (arg1
)
4985 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
4986 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg0
, 0));
4990 case BIT_ANDTC_EXPR
:
4991 if (integer_all_onesp (arg0
))
4992 return non_lvalue (convert (type
, arg1
));
4993 if (integer_zerop (arg0
))
4994 return omit_one_operand (type
, arg0
, arg1
);
4995 if (TREE_CODE (arg1
) == INTEGER_CST
)
4997 arg1
= fold (build1 (BIT_NOT_EXPR
, type
, arg1
));
4998 code
= BIT_AND_EXPR
;
5004 /* In most cases, do nothing with a divide by zero. */
5005 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5006 #ifndef REAL_INFINITY
5007 if (TREE_CODE (arg1
) == REAL_CST
&& real_zerop (arg1
))
5010 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5012 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5013 However, ANSI says we can drop signals, so we can do this anyway. */
5014 if (real_onep (arg1
))
5015 return non_lvalue (convert (type
, arg0
));
5017 /* If ARG1 is a constant, we can convert this to a multiply by the
5018 reciprocal. This does not have the same rounding properties,
5019 so only do this if -ffast-math. We can actually always safely
5020 do it if ARG1 is a power of two, but it's hard to tell if it is
5021 or not in a portable manner. */
5022 if (TREE_CODE (arg1
) == REAL_CST
)
5025 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
5027 return fold (build (MULT_EXPR
, type
, arg0
, tem
));
5028 /* Find the reciprocal if optimizing and the result is exact. */
5032 r
= TREE_REAL_CST (arg1
);
5033 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
5035 tem
= build_real (type
, r
);
5036 return fold (build (MULT_EXPR
, type
, arg0
, tem
));
5042 case TRUNC_DIV_EXPR
:
5043 case ROUND_DIV_EXPR
:
5044 case FLOOR_DIV_EXPR
:
5046 case EXACT_DIV_EXPR
:
5047 if (integer_onep (arg1
))
5048 return non_lvalue (convert (type
, arg0
));
5049 if (integer_zerop (arg1
))
5052 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5053 operation, EXACT_DIV_EXPR.
5055 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5056 At one time others generated faster code, it's not clear if they do
5057 after the last round to changes to the DIV code in expmed.c. */
5058 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
5059 && multiple_of_p (type
, arg0
, arg1
))
5060 return fold (build (EXACT_DIV_EXPR
, type
, arg0
, arg1
));
5062 /* If we have ((a / C1) / C2) where both division are the same type, try
5063 to simplify. First see if C1 * C2 overflows or not. */
5064 if (TREE_CODE (arg0
) == code
&& TREE_CODE (arg1
) == INTEGER_CST
5065 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
5069 new_divisor
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 1), arg1
, 0);
5070 tem
= const_binop (FLOOR_DIV_EXPR
, new_divisor
, arg1
, 0);
5072 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_LOW (tem
)
5073 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_HIGH (tem
))
5075 /* If no overflow, divide by C1*C2. */
5076 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), new_divisor
));
5080 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
5081 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
5082 expressions, which often appear in the offsets or sizes of
5083 objects with a varying size. Only deal with positive divisors
5084 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
5086 Look for NOPs and SAVE_EXPRs inside. */
5088 if (TREE_CODE (arg1
) == INTEGER_CST
5089 && tree_int_cst_sgn (arg1
) >= 0)
5091 int have_save_expr
= 0;
5092 tree c2
= integer_zero_node
;
5095 if (TREE_CODE (xarg0
) == SAVE_EXPR
&& SAVE_EXPR_RTL (xarg0
) == 0)
5096 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
5100 /* Look inside the dividend and simplify using EXACT_DIV_EXPR
5102 if (TREE_CODE (xarg0
) == MULT_EXPR
5103 && multiple_of_p (type
, TREE_OPERAND (xarg0
, 0), arg1
))
5107 t
= fold (build (MULT_EXPR
, type
,
5108 fold (build (EXACT_DIV_EXPR
, type
,
5109 TREE_OPERAND (xarg0
, 0), arg1
)),
5110 TREE_OPERAND (xarg0
, 1)));
5117 if (TREE_CODE (xarg0
) == MULT_EXPR
5118 && multiple_of_p (type
, TREE_OPERAND (xarg0
, 1), arg1
))
5122 t
= fold (build (MULT_EXPR
, type
,
5123 fold (build (EXACT_DIV_EXPR
, type
,
5124 TREE_OPERAND (xarg0
, 1), arg1
)),
5125 TREE_OPERAND (xarg0
, 0)));
5131 if (TREE_CODE (xarg0
) == PLUS_EXPR
5132 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
5133 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
5134 else if (TREE_CODE (xarg0
) == MINUS_EXPR
5135 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
5136 /* If we are doing this computation unsigned, the negate
5138 && ! TREE_UNSIGNED (type
))
5140 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
5141 xarg0
= TREE_OPERAND (xarg0
, 0);
5144 if (TREE_CODE (xarg0
) == SAVE_EXPR
&& SAVE_EXPR_RTL (xarg0
) == 0)
5145 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
5149 if (TREE_CODE (xarg0
) == MULT_EXPR
5150 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
5151 && tree_int_cst_sgn (TREE_OPERAND (xarg0
, 1)) >= 0
5152 && (integer_zerop (const_binop (TRUNC_MOD_EXPR
,
5153 TREE_OPERAND (xarg0
, 1), arg1
, 1))
5154 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, arg1
,
5155 TREE_OPERAND (xarg0
, 1), 1)))
5156 && (tree_int_cst_sgn (c2
) >= 0
5157 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, c2
,
5160 tree outer_div
= integer_one_node
;
5161 tree c1
= TREE_OPERAND (xarg0
, 1);
5164 /* If C3 > C1, set them equal and do a divide by
5165 C3/C1 at the end of the operation. */
5166 if (tree_int_cst_lt (c1
, c3
))
5167 outer_div
= const_binop (code
, c3
, c1
, 0), c3
= c1
;
5169 /* The result is A * (C1/C3) + (C2/C3). */
5170 t
= fold (build (PLUS_EXPR
, type
,
5171 fold (build (MULT_EXPR
, type
,
5172 TREE_OPERAND (xarg0
, 0),
5173 const_binop (code
, c1
, c3
, 1))),
5174 const_binop (code
, c2
, c3
, 1)));
5176 if (! integer_onep (outer_div
))
5177 t
= fold (build (code
, type
, t
, convert (type
, outer_div
)));
5189 case FLOOR_MOD_EXPR
:
5190 case ROUND_MOD_EXPR
:
5191 case TRUNC_MOD_EXPR
:
5192 if (integer_onep (arg1
))
5193 return omit_one_operand (type
, integer_zero_node
, arg0
);
5194 if (integer_zerop (arg1
))
5197 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
5198 where C1 % C3 == 0. Handle similarly to the division case,
5199 but don't bother with SAVE_EXPRs. */
5201 if (TREE_CODE (arg1
) == INTEGER_CST
5202 && ! integer_zerop (arg1
))
5204 tree c2
= integer_zero_node
;
5207 if (TREE_CODE (xarg0
) == PLUS_EXPR
5208 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
5209 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
5210 else if (TREE_CODE (xarg0
) == MINUS_EXPR
5211 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
5212 && ! TREE_UNSIGNED (type
))
5214 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
5215 xarg0
= TREE_OPERAND (xarg0
, 0);
5220 if (TREE_CODE (xarg0
) == MULT_EXPR
5221 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
5222 && integer_zerop (const_binop (TRUNC_MOD_EXPR
,
5223 TREE_OPERAND (xarg0
, 1),
5225 && tree_int_cst_sgn (c2
) >= 0)
5226 /* The result is (C2%C3). */
5227 return omit_one_operand (type
, const_binop (code
, c2
, arg1
, 1),
5228 TREE_OPERAND (xarg0
, 0));
5237 if (integer_zerop (arg1
))
5238 return non_lvalue (convert (type
, arg0
));
5239 /* Since negative shift count is not well-defined,
5240 don't try to compute it in the compiler. */
5241 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
5243 /* Rewrite an LROTATE_EXPR by a constant into an
5244 RROTATE_EXPR by a new constant. */
5245 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
5247 TREE_SET_CODE (t
, RROTATE_EXPR
);
5248 code
= RROTATE_EXPR
;
5249 TREE_OPERAND (t
, 1) = arg1
5252 convert (TREE_TYPE (arg1
),
5253 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type
)), 0)),
5255 if (tree_int_cst_sgn (arg1
) < 0)
5259 /* If we have a rotate of a bit operation with the rotate count and
5260 the second operand of the bit operation both constant,
5261 permute the two operations. */
5262 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
5263 && (TREE_CODE (arg0
) == BIT_AND_EXPR
5264 || TREE_CODE (arg0
) == BIT_ANDTC_EXPR
5265 || TREE_CODE (arg0
) == BIT_IOR_EXPR
5266 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
5267 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
5268 return fold (build (TREE_CODE (arg0
), type
,
5269 fold (build (code
, type
,
5270 TREE_OPERAND (arg0
, 0), arg1
)),
5271 fold (build (code
, type
,
5272 TREE_OPERAND (arg0
, 1), arg1
))));
5274 /* Two consecutive rotates adding up to the width of the mode can
5276 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
5277 && TREE_CODE (arg0
) == RROTATE_EXPR
5278 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
5279 && TREE_INT_CST_HIGH (arg1
) == 0
5280 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
5281 && ((TREE_INT_CST_LOW (arg1
)
5282 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
5283 == GET_MODE_BITSIZE (TYPE_MODE (type
))))
5284 return TREE_OPERAND (arg0
, 0);
5289 if (operand_equal_p (arg0
, arg1
, 0))
5291 if (INTEGRAL_TYPE_P (type
)
5292 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), 1))
5293 return omit_one_operand (type
, arg1
, arg0
);
5297 if (operand_equal_p (arg0
, arg1
, 0))
5299 if (INTEGRAL_TYPE_P (type
)
5300 && TYPE_MAX_VALUE (type
)
5301 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), 1))
5302 return omit_one_operand (type
, arg1
, arg0
);
5305 case TRUTH_NOT_EXPR
:
5306 /* Note that the operand of this must be an int
5307 and its values must be 0 or 1.
5308 ("true" is a fixed value perhaps depending on the language,
5309 but we don't handle values other than 1 correctly yet.) */
5310 tem
= invert_truthvalue (arg0
);
5311 /* Avoid infinite recursion. */
5312 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
5314 return convert (type
, tem
);
5316 case TRUTH_ANDIF_EXPR
:
5317 /* Note that the operands of this must be ints
5318 and their values must be 0 or 1.
5319 ("true" is a fixed value perhaps depending on the language.) */
5320 /* If first arg is constant zero, return it. */
5321 if (integer_zerop (arg0
))
5323 case TRUTH_AND_EXPR
:
5324 /* If either arg is constant true, drop it. */
5325 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
5326 return non_lvalue (arg1
);
5327 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
5328 return non_lvalue (arg0
);
5329 /* If second arg is constant zero, result is zero, but first arg
5330 must be evaluated. */
5331 if (integer_zerop (arg1
))
5332 return omit_one_operand (type
, arg1
, arg0
);
5333 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5334 case will be handled here. */
5335 if (integer_zerop (arg0
))
5336 return omit_one_operand (type
, arg0
, arg1
);
5339 /* We only do these simplifications if we are optimizing. */
5343 /* Check for things like (A || B) && (A || C). We can convert this
5344 to A || (B && C). Note that either operator can be any of the four
5345 truth and/or operations and the transformation will still be
5346 valid. Also note that we only care about order for the
5347 ANDIF and ORIF operators. If B contains side effects, this
5348 might change the truth-value of A. */
5349 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
5350 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
5351 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
5352 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
5353 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
5354 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
5356 tree a00
= TREE_OPERAND (arg0
, 0);
5357 tree a01
= TREE_OPERAND (arg0
, 1);
5358 tree a10
= TREE_OPERAND (arg1
, 0);
5359 tree a11
= TREE_OPERAND (arg1
, 1);
5360 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
5361 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
5362 && (code
== TRUTH_AND_EXPR
5363 || code
== TRUTH_OR_EXPR
));
5365 if (operand_equal_p (a00
, a10
, 0))
5366 return fold (build (TREE_CODE (arg0
), type
, a00
,
5367 fold (build (code
, type
, a01
, a11
))));
5368 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
5369 return fold (build (TREE_CODE (arg0
), type
, a00
,
5370 fold (build (code
, type
, a01
, a10
))));
5371 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
5372 return fold (build (TREE_CODE (arg0
), type
, a01
,
5373 fold (build (code
, type
, a00
, a11
))));
5375 /* This case if tricky because we must either have commutative
5376 operators or else A10 must not have side-effects. */
5378 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
5379 && operand_equal_p (a01
, a11
, 0))
5380 return fold (build (TREE_CODE (arg0
), type
,
5381 fold (build (code
, type
, a00
, a10
)),
5385 /* See if we can build a range comparison. */
5386 if (0 != (tem
= fold_range_test (t
)))
5389 /* Check for the possibility of merging component references. If our
5390 lhs is another similar operation, try to merge its rhs with our
5391 rhs. Then try to merge our lhs and rhs. */
5392 if (TREE_CODE (arg0
) == code
5393 && 0 != (tem
= fold_truthop (code
, type
,
5394 TREE_OPERAND (arg0
, 1), arg1
)))
5395 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
5397 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
5402 case TRUTH_ORIF_EXPR
:
5403 /* Note that the operands of this must be ints
5404 and their values must be 0 or true.
5405 ("true" is a fixed value perhaps depending on the language.) */
5406 /* If first arg is constant true, return it. */
5407 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
5410 /* If either arg is constant zero, drop it. */
5411 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
5412 return non_lvalue (arg1
);
5413 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
))
5414 return non_lvalue (arg0
);
5415 /* If second arg is constant true, result is true, but we must
5416 evaluate first arg. */
5417 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
5418 return omit_one_operand (type
, arg1
, arg0
);
5419 /* Likewise for first arg, but note this only occurs here for
5421 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
5422 return omit_one_operand (type
, arg0
, arg1
);
5425 case TRUTH_XOR_EXPR
:
5426 /* If either arg is constant zero, drop it. */
5427 if (integer_zerop (arg0
))
5428 return non_lvalue (arg1
);
5429 if (integer_zerop (arg1
))
5430 return non_lvalue (arg0
);
5431 /* If either arg is constant true, this is a logical inversion. */
5432 if (integer_onep (arg0
))
5433 return non_lvalue (invert_truthvalue (arg1
));
5434 if (integer_onep (arg1
))
5435 return non_lvalue (invert_truthvalue (arg0
));
5444 /* If one arg is a constant integer, put it last. */
5445 if (TREE_CODE (arg0
) == INTEGER_CST
5446 && TREE_CODE (arg1
) != INTEGER_CST
)
5448 TREE_OPERAND (t
, 0) = arg1
;
5449 TREE_OPERAND (t
, 1) = arg0
;
5450 arg0
= TREE_OPERAND (t
, 0);
5451 arg1
= TREE_OPERAND (t
, 1);
5452 code
= swap_tree_comparison (code
);
5453 TREE_SET_CODE (t
, code
);
5456 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5457 First, see if one arg is constant; find the constant arg
5458 and the other one. */
5460 tree constop
= 0, varop
= NULL_TREE
;
5461 int constopnum
= -1;
5463 if (TREE_CONSTANT (arg1
))
5464 constopnum
= 1, constop
= arg1
, varop
= arg0
;
5465 if (TREE_CONSTANT (arg0
))
5466 constopnum
= 0, constop
= arg0
, varop
= arg1
;
5468 if (constop
&& TREE_CODE (varop
) == POSTINCREMENT_EXPR
)
5470 /* This optimization is invalid for ordered comparisons
5471 if CONST+INCR overflows or if foo+incr might overflow.
5472 This optimization is invalid for floating point due to rounding.
5473 For pointer types we assume overflow doesn't happen. */
5474 if (POINTER_TYPE_P (TREE_TYPE (varop
))
5475 || (! FLOAT_TYPE_P (TREE_TYPE (varop
))
5476 && (code
== EQ_EXPR
|| code
== NE_EXPR
)))
5479 = fold (build (PLUS_EXPR
, TREE_TYPE (varop
),
5480 constop
, TREE_OPERAND (varop
, 1)));
5481 TREE_SET_CODE (varop
, PREINCREMENT_EXPR
);
5483 /* If VAROP is a reference to a bitfield, we must mask
5484 the constant by the width of the field. */
5485 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
5486 && DECL_BIT_FIELD(TREE_OPERAND
5487 (TREE_OPERAND (varop
, 0), 1)))
5490 = TREE_INT_CST_LOW (DECL_SIZE
5492 (TREE_OPERAND (varop
, 0), 1)));
5493 tree mask
, unsigned_type
;
5495 tree folded_compare
;
5497 /* First check whether the comparison would come out
5498 always the same. If we don't do that we would
5499 change the meaning with the masking. */
5500 if (constopnum
== 0)
5501 folded_compare
= fold (build (code
, type
, constop
,
5502 TREE_OPERAND (varop
, 0)));
5504 folded_compare
= fold (build (code
, type
,
5505 TREE_OPERAND (varop
, 0),
5507 if (integer_zerop (folded_compare
)
5508 || integer_onep (folded_compare
))
5509 return omit_one_operand (type
, folded_compare
, varop
);
5511 unsigned_type
= type_for_size (size
, 1);
5512 precision
= TYPE_PRECISION (unsigned_type
);
5513 mask
= build_int_2 (~0, ~0);
5514 TREE_TYPE (mask
) = unsigned_type
;
5515 force_fit_type (mask
, 0);
5516 mask
= const_binop (RSHIFT_EXPR
, mask
,
5517 size_int (precision
- size
), 0);
5518 newconst
= fold (build (BIT_AND_EXPR
,
5519 TREE_TYPE (varop
), newconst
,
5520 convert (TREE_TYPE (varop
),
5525 t
= build (code
, type
, TREE_OPERAND (t
, 0),
5526 TREE_OPERAND (t
, 1));
5527 TREE_OPERAND (t
, constopnum
) = newconst
;
5531 else if (constop
&& TREE_CODE (varop
) == POSTDECREMENT_EXPR
)
5533 if (POINTER_TYPE_P (TREE_TYPE (varop
))
5534 || (! FLOAT_TYPE_P (TREE_TYPE (varop
))
5535 && (code
== EQ_EXPR
|| code
== NE_EXPR
)))
5538 = fold (build (MINUS_EXPR
, TREE_TYPE (varop
),
5539 constop
, TREE_OPERAND (varop
, 1)));
5540 TREE_SET_CODE (varop
, PREDECREMENT_EXPR
);
5542 if (TREE_CODE (TREE_OPERAND (varop
, 0)) == COMPONENT_REF
5543 && DECL_BIT_FIELD(TREE_OPERAND
5544 (TREE_OPERAND (varop
, 0), 1)))
5547 = TREE_INT_CST_LOW (DECL_SIZE
5549 (TREE_OPERAND (varop
, 0), 1)));
5550 tree mask
, unsigned_type
;
5552 tree folded_compare
;
5554 if (constopnum
== 0)
5555 folded_compare
= fold (build (code
, type
, constop
,
5556 TREE_OPERAND (varop
, 0)));
5558 folded_compare
= fold (build (code
, type
,
5559 TREE_OPERAND (varop
, 0),
5561 if (integer_zerop (folded_compare
)
5562 || integer_onep (folded_compare
))
5563 return omit_one_operand (type
, folded_compare
, varop
);
5565 unsigned_type
= type_for_size (size
, 1);
5566 precision
= TYPE_PRECISION (unsigned_type
);
5567 mask
= build_int_2 (~0, ~0);
5568 TREE_TYPE (mask
) = TREE_TYPE (varop
);
5569 force_fit_type (mask
, 0);
5570 mask
= const_binop (RSHIFT_EXPR
, mask
,
5571 size_int (precision
- size
), 0);
5572 newconst
= fold (build (BIT_AND_EXPR
,
5573 TREE_TYPE (varop
), newconst
,
5574 convert (TREE_TYPE (varop
),
5579 t
= build (code
, type
, TREE_OPERAND (t
, 0),
5580 TREE_OPERAND (t
, 1));
5581 TREE_OPERAND (t
, constopnum
) = newconst
;
5587 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5588 if (TREE_CODE (arg1
) == INTEGER_CST
5589 && TREE_CODE (arg0
) != INTEGER_CST
5590 && tree_int_cst_sgn (arg1
) > 0)
5592 switch (TREE_CODE (t
))
5596 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
5597 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
);
5602 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
5603 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
);
5611 /* If this is an EQ or NE comparison with zero and ARG0 is
5612 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5613 two operations, but the latter can be done in one less insn
5614 on machines that have only two-operand insns or on which a
5615 constant cannot be the first operand. */
5616 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
5617 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
5619 if (TREE_CODE (TREE_OPERAND (arg0
, 0)) == LSHIFT_EXPR
5620 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0)))
5622 fold (build (code
, type
,
5623 build (BIT_AND_EXPR
, TREE_TYPE (arg0
),
5625 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
5626 TREE_OPERAND (arg0
, 1),
5627 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)),
5628 convert (TREE_TYPE (arg0
),
5631 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
5632 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
5634 fold (build (code
, type
,
5635 build (BIT_AND_EXPR
, TREE_TYPE (arg0
),
5637 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
5638 TREE_OPERAND (arg0
, 0),
5639 TREE_OPERAND (TREE_OPERAND (arg0
, 1), 1)),
5640 convert (TREE_TYPE (arg0
),
5645 /* If this is an NE or EQ comparison of zero against the result of a
5646 signed MOD operation whose second operand is a power of 2, make
5647 the MOD operation unsigned since it is simpler and equivalent. */
5648 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
5649 && integer_zerop (arg1
)
5650 && ! TREE_UNSIGNED (TREE_TYPE (arg0
))
5651 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
5652 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
5653 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
5654 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
5655 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
5657 tree newtype
= unsigned_type (TREE_TYPE (arg0
));
5658 tree newmod
= build (TREE_CODE (arg0
), newtype
,
5659 convert (newtype
, TREE_OPERAND (arg0
, 0)),
5660 convert (newtype
, TREE_OPERAND (arg0
, 1)));
5662 return build (code
, type
, newmod
, convert (newtype
, arg1
));
5665 /* If this is an NE comparison of zero with an AND of one, remove the
5666 comparison since the AND will give the correct value. */
5667 if (code
== NE_EXPR
&& integer_zerop (arg1
)
5668 && TREE_CODE (arg0
) == BIT_AND_EXPR
5669 && integer_onep (TREE_OPERAND (arg0
, 1)))
5670 return convert (type
, arg0
);
5672 /* If we have (A & C) == C where C is a power of 2, convert this into
5673 (A & C) != 0. Similarly for NE_EXPR. */
5674 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
5675 && TREE_CODE (arg0
) == BIT_AND_EXPR
5676 && integer_pow2p (TREE_OPERAND (arg0
, 1))
5677 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
5678 return build (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
5679 arg0
, integer_zero_node
);
5681 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5682 and similarly for >= into !=. */
5683 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
5684 && TREE_UNSIGNED (TREE_TYPE (arg0
))
5685 && TREE_CODE (arg1
) == LSHIFT_EXPR
5686 && integer_onep (TREE_OPERAND (arg1
, 0)))
5687 return build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
5688 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
5689 TREE_OPERAND (arg1
, 1)),
5690 convert (TREE_TYPE (arg0
), integer_zero_node
));
5692 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
5693 && TREE_UNSIGNED (TREE_TYPE (arg0
))
5694 && (TREE_CODE (arg1
) == NOP_EXPR
5695 || TREE_CODE (arg1
) == CONVERT_EXPR
)
5696 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
5697 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
5699 build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
5700 convert (TREE_TYPE (arg0
),
5701 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
5702 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1))),
5703 convert (TREE_TYPE (arg0
), integer_zero_node
));
5705 /* Simplify comparison of something with itself. (For IEEE
5706 floating-point, we can only do some of these simplifications.) */
5707 if (operand_equal_p (arg0
, arg1
, 0))
5714 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5715 return constant_boolean_node (1, type
);
5717 TREE_SET_CODE (t
, code
);
5721 /* For NE, we can only do this simplification if integer. */
5722 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5724 /* ... fall through ... */
5727 return constant_boolean_node (0, type
);
5733 /* An unsigned comparison against 0 can be simplified. */
5734 if (integer_zerop (arg1
)
5735 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5736 || POINTER_TYPE_P (TREE_TYPE (arg1
)))
5737 && TREE_UNSIGNED (TREE_TYPE (arg1
)))
5739 switch (TREE_CODE (t
))
5743 TREE_SET_CODE (t
, NE_EXPR
);
5747 TREE_SET_CODE (t
, EQ_EXPR
);
5750 return omit_one_operand (type
,
5751 convert (type
, integer_one_node
),
5754 return omit_one_operand (type
,
5755 convert (type
, integer_zero_node
),
5762 /* An unsigned <= 0x7fffffff can be simplified. */
5764 int width
= TYPE_PRECISION (TREE_TYPE (arg1
));
5765 if (TREE_CODE (arg1
) == INTEGER_CST
5766 && ! TREE_CONSTANT_OVERFLOW (arg1
)
5767 && width
<= HOST_BITS_PER_WIDE_INT
5768 && TREE_INT_CST_LOW (arg1
) == ((HOST_WIDE_INT
) 1 << (width
- 1)) - 1
5769 && TREE_INT_CST_HIGH (arg1
) == 0
5770 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5771 || POINTER_TYPE_P (TREE_TYPE (arg1
)))
5772 && TREE_UNSIGNED (TREE_TYPE (arg1
)))
5774 switch (TREE_CODE (t
))
5777 return fold (build (GE_EXPR
, type
,
5778 convert (signed_type (TREE_TYPE (arg0
)),
5780 convert (signed_type (TREE_TYPE (arg1
)),
5781 integer_zero_node
)));
5783 return fold (build (LT_EXPR
, type
,
5784 convert (signed_type (TREE_TYPE (arg0
)),
5786 convert (signed_type (TREE_TYPE (arg1
)),
5787 integer_zero_node
)));
5794 /* If we are comparing an expression that just has comparisons
5795 of two integer values, arithmetic expressions of those comparisons,
5796 and constants, we can simplify it. There are only three cases
5797 to check: the two values can either be equal, the first can be
5798 greater, or the second can be greater. Fold the expression for
5799 those three values. Since each value must be 0 or 1, we have
5800 eight possibilities, each of which corresponds to the constant 0
5801 or 1 or one of the six possible comparisons.
5803 This handles common cases like (a > b) == 0 but also handles
5804 expressions like ((x > y) - (y > x)) > 0, which supposedly
5805 occur in macroized code. */
5807 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
5809 tree cval1
= 0, cval2
= 0;
5812 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
5813 /* Don't handle degenerate cases here; they should already
5814 have been handled anyway. */
5815 && cval1
!= 0 && cval2
!= 0
5816 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
5817 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
5818 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
5819 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
5820 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
5821 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
5822 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
5824 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
5825 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
5827 /* We can't just pass T to eval_subst in case cval1 or cval2
5828 was the same as ARG1. */
5831 = fold (build (code
, type
,
5832 eval_subst (arg0
, cval1
, maxval
, cval2
, minval
),
5835 = fold (build (code
, type
,
5836 eval_subst (arg0
, cval1
, maxval
, cval2
, maxval
),
5839 = fold (build (code
, type
,
5840 eval_subst (arg0
, cval1
, minval
, cval2
, maxval
),
5843 /* All three of these results should be 0 or 1. Confirm they
5844 are. Then use those values to select the proper code
5847 if ((integer_zerop (high_result
)
5848 || integer_onep (high_result
))
5849 && (integer_zerop (equal_result
)
5850 || integer_onep (equal_result
))
5851 && (integer_zerop (low_result
)
5852 || integer_onep (low_result
)))
5854 /* Make a 3-bit mask with the high-order bit being the
5855 value for `>', the next for '=', and the low for '<'. */
5856 switch ((integer_onep (high_result
) * 4)
5857 + (integer_onep (equal_result
) * 2)
5858 + integer_onep (low_result
))
5862 return omit_one_operand (type
, integer_zero_node
, arg0
);
5883 return omit_one_operand (type
, integer_one_node
, arg0
);
5886 t
= build (code
, type
, cval1
, cval2
);
5888 return save_expr (t
);
5895 /* If this is a comparison of a field, we may be able to simplify it. */
5896 if ((TREE_CODE (arg0
) == COMPONENT_REF
5897 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
5898 && (code
== EQ_EXPR
|| code
== NE_EXPR
)
5899 /* Handle the constant case even without -O
5900 to make sure the warnings are given. */
5901 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
5903 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
5907 /* If this is a comparison of complex values and either or both sides
5908 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
5909 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
5910 This may prevent needless evaluations. */
5911 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
5912 && TREE_CODE (TREE_TYPE (arg0
)) == COMPLEX_TYPE
5913 && (TREE_CODE (arg0
) == COMPLEX_EXPR
5914 || TREE_CODE (arg1
) == COMPLEX_EXPR
5915 || TREE_CODE (arg0
) == COMPLEX_CST
5916 || TREE_CODE (arg1
) == COMPLEX_CST
))
5918 tree subtype
= TREE_TYPE (TREE_TYPE (arg0
));
5919 tree real0
, imag0
, real1
, imag1
;
5921 arg0
= save_expr (arg0
);
5922 arg1
= save_expr (arg1
);
5923 real0
= fold (build1 (REALPART_EXPR
, subtype
, arg0
));
5924 imag0
= fold (build1 (IMAGPART_EXPR
, subtype
, arg0
));
5925 real1
= fold (build1 (REALPART_EXPR
, subtype
, arg1
));
5926 imag1
= fold (build1 (IMAGPART_EXPR
, subtype
, arg1
));
5928 return fold (build ((code
== EQ_EXPR
? TRUTH_ANDIF_EXPR
5931 fold (build (code
, type
, real0
, real1
)),
5932 fold (build (code
, type
, imag0
, imag1
))));
5935 /* From here on, the only cases we handle are when the result is
5936 known to be a constant.
5938 To compute GT, swap the arguments and do LT.
5939 To compute GE, do LT and invert the result.
5940 To compute LE, swap the arguments, do LT and invert the result.
5941 To compute NE, do EQ and invert the result.
5943 Therefore, the code below must handle only EQ and LT. */
5945 if (code
== LE_EXPR
|| code
== GT_EXPR
)
5947 tem
= arg0
, arg0
= arg1
, arg1
= tem
;
5948 code
= swap_tree_comparison (code
);
5951 /* Note that it is safe to invert for real values here because we
5952 will check below in the one case that it matters. */
5955 if (code
== NE_EXPR
|| code
== GE_EXPR
)
5958 code
= invert_tree_comparison (code
);
5961 /* Compute a result for LT or EQ if args permit;
5962 otherwise return T. */
5963 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
5965 if (code
== EQ_EXPR
)
5966 t1
= build_int_2 ((TREE_INT_CST_LOW (arg0
)
5967 == TREE_INT_CST_LOW (arg1
))
5968 && (TREE_INT_CST_HIGH (arg0
)
5969 == TREE_INT_CST_HIGH (arg1
)),
5972 t1
= build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0
))
5973 ? INT_CST_LT_UNSIGNED (arg0
, arg1
)
5974 : INT_CST_LT (arg0
, arg1
)),
5978 #if 0 /* This is no longer useful, but breaks some real code. */
5979 /* Assume a nonexplicit constant cannot equal an explicit one,
5980 since such code would be undefined anyway.
5981 Exception: on sysvr4, using #pragma weak,
5982 a label can come out as 0. */
5983 else if (TREE_CODE (arg1
) == INTEGER_CST
5984 && !integer_zerop (arg1
)
5985 && TREE_CONSTANT (arg0
)
5986 && TREE_CODE (arg0
) == ADDR_EXPR
5988 t1
= build_int_2 (0, 0);
5990 /* Two real constants can be compared explicitly. */
5991 else if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
5993 /* If either operand is a NaN, the result is false with two
5994 exceptions: First, an NE_EXPR is true on NaNs, but that case
5995 is already handled correctly since we will be inverting the
5996 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5997 or a GE_EXPR into a LT_EXPR, we must return true so that it
5998 will be inverted into false. */
6000 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
6001 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
)))
6002 t1
= build_int_2 (invert
&& code
== LT_EXPR
, 0);
6004 else if (code
== EQ_EXPR
)
6005 t1
= build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
),
6006 TREE_REAL_CST (arg1
)),
6009 t1
= build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0
),
6010 TREE_REAL_CST (arg1
)),
6014 if (t1
== NULL_TREE
)
6018 TREE_INT_CST_LOW (t1
) ^= 1;
6020 TREE_TYPE (t1
) = type
;
6021 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
6022 return truthvalue_conversion (t1
);
6026 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6027 so all simple results must be passed through pedantic_non_lvalue. */
6028 if (TREE_CODE (arg0
) == INTEGER_CST
)
6029 return pedantic_non_lvalue
6030 (TREE_OPERAND (t
, (integer_zerop (arg0
) ? 2 : 1)));
6031 else if (operand_equal_p (arg1
, TREE_OPERAND (expr
, 2), 0))
6032 return pedantic_omit_one_operand (type
, arg1
, arg0
);
6034 /* If the second operand is zero, invert the comparison and swap
6035 the second and third operands. Likewise if the second operand
6036 is constant and the third is not or if the third operand is
6037 equivalent to the first operand of the comparison. */
6039 if (integer_zerop (arg1
)
6040 || (TREE_CONSTANT (arg1
) && ! TREE_CONSTANT (TREE_OPERAND (t
, 2)))
6041 || (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
6042 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
6043 TREE_OPERAND (t
, 2),
6044 TREE_OPERAND (arg0
, 1))))
6046 /* See if this can be inverted. If it can't, possibly because
6047 it was a floating-point inequality comparison, don't do
6049 tem
= invert_truthvalue (arg0
);
6051 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
6053 t
= build (code
, type
, tem
,
6054 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1));
6056 /* arg1 should be the first argument of the new T. */
6057 arg1
= TREE_OPERAND (t
, 1);
6062 /* If we have A op B ? A : C, we may be able to convert this to a
6063 simpler expression, depending on the operation and the values
6064 of B and C. IEEE floating point prevents this though,
6065 because A or B might be -0.0 or a NaN. */
6067 if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
6068 && (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
6069 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
6071 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
6072 arg1
, TREE_OPERAND (arg0
, 1)))
6074 tree arg2
= TREE_OPERAND (t
, 2);
6075 enum tree_code comp_code
= TREE_CODE (arg0
);
6079 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6080 depending on the comparison operation. */
6081 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
6082 ? real_zerop (TREE_OPERAND (arg0
, 1))
6083 : integer_zerop (TREE_OPERAND (arg0
, 1)))
6084 && TREE_CODE (arg2
) == NEGATE_EXPR
6085 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
6089 return pedantic_non_lvalue
6090 (fold (build1 (NEGATE_EXPR
, type
, arg1
)));
6092 return pedantic_non_lvalue (convert (type
, arg1
));
6095 if (TREE_UNSIGNED (TREE_TYPE (arg1
)))
6096 arg1
= convert (signed_type (TREE_TYPE (arg1
)), arg1
);
6097 return pedantic_non_lvalue
6098 (convert (type
, fold (build1 (ABS_EXPR
,
6099 TREE_TYPE (arg1
), arg1
))));
6102 if (TREE_UNSIGNED (TREE_TYPE (arg1
)))
6103 arg1
= convert (signed_type (TREE_TYPE (arg1
)), arg1
);
6104 return pedantic_non_lvalue
6105 (fold (build1 (NEGATE_EXPR
, type
,
6107 fold (build1 (ABS_EXPR
,
6114 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6117 if (integer_zerop (TREE_OPERAND (arg0
, 1)) && integer_zerop (arg2
))
6119 if (comp_code
== NE_EXPR
)
6120 return pedantic_non_lvalue (convert (type
, arg1
));
6121 else if (comp_code
== EQ_EXPR
)
6122 return pedantic_non_lvalue (convert (type
, integer_zero_node
));
6125 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6126 or max (A, B), depending on the operation. */
6128 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 1),
6129 arg2
, TREE_OPERAND (arg0
, 0)))
6131 tree comp_op0
= TREE_OPERAND (arg0
, 0);
6132 tree comp_op1
= TREE_OPERAND (arg0
, 1);
6133 tree comp_type
= TREE_TYPE (comp_op0
);
6138 return pedantic_non_lvalue (convert (type
, arg2
));
6140 return pedantic_non_lvalue (convert (type
, arg1
));
6143 /* In C++ a ?: expression can be an lvalue, so put the
6144 operand which will be used if they are equal first
6145 so that we can convert this back to the
6146 corresponding COND_EXPR. */
6147 return pedantic_non_lvalue
6148 (convert (type
, (fold (build (MIN_EXPR
, comp_type
,
6149 (comp_code
== LE_EXPR
6150 ? comp_op0
: comp_op1
),
6151 (comp_code
== LE_EXPR
6152 ? comp_op1
: comp_op0
))))));
6156 return pedantic_non_lvalue
6157 (convert (type
, fold (build (MAX_EXPR
, comp_type
,
6158 (comp_code
== GE_EXPR
6159 ? comp_op0
: comp_op1
),
6160 (comp_code
== GE_EXPR
6161 ? comp_op1
: comp_op0
)))));
6168 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6169 we might still be able to simplify this. For example,
6170 if C1 is one less or one more than C2, this might have started
6171 out as a MIN or MAX and been transformed by this function.
6172 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6174 if (INTEGRAL_TYPE_P (type
)
6175 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
6176 && TREE_CODE (arg2
) == INTEGER_CST
)
6180 /* We can replace A with C1 in this case. */
6181 arg1
= convert (type
, TREE_OPERAND (arg0
, 1));
6182 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
,
6183 TREE_OPERAND (t
, 2));
6187 /* If C1 is C2 + 1, this is min(A, C2). */
6188 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
), 1)
6189 && operand_equal_p (TREE_OPERAND (arg0
, 1),
6190 const_binop (PLUS_EXPR
, arg2
,
6191 integer_one_node
, 0), 1))
6192 return pedantic_non_lvalue
6193 (fold (build (MIN_EXPR
, type
, arg1
, arg2
)));
6197 /* If C1 is C2 - 1, this is min(A, C2). */
6198 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
), 1)
6199 && operand_equal_p (TREE_OPERAND (arg0
, 1),
6200 const_binop (MINUS_EXPR
, arg2
,
6201 integer_one_node
, 0), 1))
6202 return pedantic_non_lvalue
6203 (fold (build (MIN_EXPR
, type
, arg1
, arg2
)));
6207 /* If C1 is C2 - 1, this is max(A, C2). */
6208 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
), 1)
6209 && operand_equal_p (TREE_OPERAND (arg0
, 1),
6210 const_binop (MINUS_EXPR
, arg2
,
6211 integer_one_node
, 0), 1))
6212 return pedantic_non_lvalue
6213 (fold (build (MAX_EXPR
, type
, arg1
, arg2
)));
6217 /* If C1 is C2 + 1, this is max(A, C2). */
6218 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
), 1)
6219 && operand_equal_p (TREE_OPERAND (arg0
, 1),
6220 const_binop (PLUS_EXPR
, arg2
,
6221 integer_one_node
, 0), 1))
6222 return pedantic_non_lvalue
6223 (fold (build (MAX_EXPR
, type
, arg1
, arg2
)));
6232 /* If the second operand is simpler than the third, swap them
6233 since that produces better jump optimization results. */
6234 if ((TREE_CONSTANT (arg1
) || TREE_CODE_CLASS (TREE_CODE (arg1
)) == 'd'
6235 || TREE_CODE (arg1
) == SAVE_EXPR
)
6236 && ! (TREE_CONSTANT (TREE_OPERAND (t
, 2))
6237 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t
, 2))) == 'd'
6238 || TREE_CODE (TREE_OPERAND (t
, 2)) == SAVE_EXPR
))
6240 /* See if this can be inverted. If it can't, possibly because
6241 it was a floating-point inequality comparison, don't do
6243 tem
= invert_truthvalue (arg0
);
6245 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
6247 t
= build (code
, type
, tem
,
6248 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1));
6250 /* arg1 should be the first argument of the new T. */
6251 arg1
= TREE_OPERAND (t
, 1);
6256 /* Convert A ? 1 : 0 to simply A. */
6257 if (integer_onep (TREE_OPERAND (t
, 1))
6258 && integer_zerop (TREE_OPERAND (t
, 2))
6259 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6260 call to fold will try to move the conversion inside
6261 a COND, which will recurse. In that case, the COND_EXPR
6262 is probably the best choice, so leave it alone. */
6263 && type
== TREE_TYPE (arg0
))
6264 return pedantic_non_lvalue (arg0
);
6266 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6267 operation is simply A & 2. */
6269 if (integer_zerop (TREE_OPERAND (t
, 2))
6270 && TREE_CODE (arg0
) == NE_EXPR
6271 && integer_zerop (TREE_OPERAND (arg0
, 1))
6272 && integer_pow2p (arg1
)
6273 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
6274 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
6276 return pedantic_non_lvalue (convert (type
, TREE_OPERAND (arg0
, 0)));
6281 /* When pedantic, a compound expression can be neither an lvalue
6282 nor an integer constant expression. */
6283 if (TREE_SIDE_EFFECTS (arg0
) || pedantic
)
6285 /* Don't let (0, 0) be null pointer constant. */
6286 if (integer_zerop (arg1
))
6287 return build1 (NOP_EXPR
, TREE_TYPE (arg1
), arg1
);
6292 return build_complex (type
, arg0
, arg1
);
6296 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6298 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6299 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
6300 TREE_OPERAND (arg0
, 1));
6301 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6302 return TREE_REALPART (arg0
);
6303 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6304 return fold (build (TREE_CODE (arg0
), type
,
6305 fold (build1 (REALPART_EXPR
, type
,
6306 TREE_OPERAND (arg0
, 0))),
6307 fold (build1 (REALPART_EXPR
,
6308 type
, TREE_OPERAND (arg0
, 1)))));
6312 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
6313 return convert (type
, integer_zero_node
);
6314 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
6315 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
6316 TREE_OPERAND (arg0
, 0));
6317 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
6318 return TREE_IMAGPART (arg0
);
6319 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
6320 return fold (build (TREE_CODE (arg0
), type
,
6321 fold (build1 (IMAGPART_EXPR
, type
,
6322 TREE_OPERAND (arg0
, 0))),
6323 fold (build1 (IMAGPART_EXPR
, type
,
6324 TREE_OPERAND (arg0
, 1)))));
6327 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6329 case CLEANUP_POINT_EXPR
:
6330 if (! has_cleanups (arg0
))
6331 return TREE_OPERAND (t
, 0);
6334 enum tree_code code0
= TREE_CODE (arg0
);
6335 int kind0
= TREE_CODE_CLASS (code0
);
6336 tree arg00
= TREE_OPERAND (arg0
, 0);
6339 if (kind0
== '1' || code0
== TRUTH_NOT_EXPR
)
6340 return fold (build1 (code0
, type
,
6341 fold (build1 (CLEANUP_POINT_EXPR
,
6342 TREE_TYPE (arg00
), arg00
))));
6344 if (kind0
== '<' || kind0
== '2'
6345 || code0
== TRUTH_ANDIF_EXPR
|| code0
== TRUTH_ORIF_EXPR
6346 || code0
== TRUTH_AND_EXPR
|| code0
== TRUTH_OR_EXPR
6347 || code0
== TRUTH_XOR_EXPR
)
6349 arg01
= TREE_OPERAND (arg0
, 1);
6351 if (TREE_CONSTANT (arg00
)
6352 || ((code0
== TRUTH_ANDIF_EXPR
|| code0
== TRUTH_ORIF_EXPR
)
6353 && ! has_cleanups (arg00
)))
6354 return fold (build (code0
, type
, arg00
,
6355 fold (build1 (CLEANUP_POINT_EXPR
,
6356 TREE_TYPE (arg01
), arg01
))));
6358 if (TREE_CONSTANT (arg01
))
6359 return fold (build (code0
, type
,
6360 fold (build1 (CLEANUP_POINT_EXPR
,
6361 TREE_TYPE (arg00
), arg00
)),
6370 } /* switch (code) */
/* Determine whether the first argument is a multiple of the second
   argument.  Return 0 if it is not, or if it is not easily determined
   to be so.

   An example of the sort of thing we care about (at this point --
   this routine could surely be made more general, and expanded
   to do what the *_DIV_EXPR's fold() cases do now) is discovering
   that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two `SAVE_EXPR (J * 8)' nodes are the
   same node (which means they will have the same value at run
   time, even though we don't know when they'll be assigned).

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8, because 8 divides the constant factor exactly
   (of course), so we don't have to worry about dealing with a
   possibly-erroneous exact divide.

   Note that we _look_ inside a SAVE_EXPR only to determine
   how it was calculated; it is not safe for fold() to do much
   of anything else with the internals of a SAVE_EXPR, since
   fold() cannot know when it will be evaluated at run time.
   For example, the latter example above _cannot_ be implemented
   as

     SAVE_EXPR (I) * J

   or any variant thereof, since the value of J at evaluation time
   of the original SAVE_EXPR is not necessarily the same at the time
   the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   into

     SAVE_EXPR (I) * SAVE_EXPR (J) * 8

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
6428 multiple_of_p (type
, top
, bottom
)
6433 if (operand_equal_p (top
, bottom
, 0))
6436 if (TREE_CODE (type
) != INTEGER_TYPE
)
6439 switch (TREE_CODE (top
))
6442 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
6443 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
6447 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
6448 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
6451 /* Punt if conversion from non-integral or wider integral type. */
6452 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
6453 || (TYPE_PRECISION (type
)
6454 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
6458 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
6461 if ((TREE_CODE (bottom
) != INTEGER_CST
)
6462 || (tree_int_cst_sgn (top
) < 0)
6463 || (tree_int_cst_sgn (bottom
) < 0))
6465 return integer_zerop (const_binop (TRUNC_MOD_EXPR
,