/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.  */
/* Handle floating overflow for `const_binop'.  */
static jmp_buf float_error;
static void encode	PROTO((HOST_WIDE_INT *, HOST_WIDE_INT, HOST_WIDE_INT));
static void decode	PROTO((HOST_WIDE_INT *, HOST_WIDE_INT *, HOST_WIDE_INT *));
int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
				HOST_WIDE_INT, HOST_WIDE_INT,
				HOST_WIDE_INT, HOST_WIDE_INT *,
				HOST_WIDE_INT *, HOST_WIDE_INT *,
				HOST_WIDE_INT *));
static int split_tree	PROTO((tree, enum tree_code, tree *, tree *, int *));
static tree const_binop PROTO((enum tree_code, tree, tree, int));
static tree fold_convert PROTO((tree, tree));
static enum tree_code invert_tree_comparison PROTO((enum tree_code));
static enum tree_code swap_tree_comparison PROTO((enum tree_code));
static int truth_value_p PROTO((enum tree_code));
static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
static tree eval_subst	PROTO((tree, tree, tree, tree, tree));
static tree omit_one_operand PROTO((tree, tree, tree));
static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
					      tree, tree));
static tree decode_field_reference PROTO((tree, int *, int *,
					  enum machine_mode *, int *,
					  int *, tree *, tree *));
static int all_ones_mask_p PROTO((tree, int));
static int simple_operand_p PROTO((tree));
static tree range_test	PROTO((enum tree_code, tree, enum tree_code,
			       enum tree_code, tree, tree, tree));
static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
static tree strip_compound_expr PROTO((tree, tree));
/* Yield nonzero if a signed left shift of A by B bits overflows.  */
#define left_shift_overflows(a, b)  ((a)  !=  ((a) << (b)) >> (b))
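/* Illustrative note (editorial addition, not in the original source):
   with 32-bit HOST_WIDE_INT, left_shift_overflows (0x40000000, 1) is
   nonzero: the shift produces 0x80000000, and the arithmetic shift back
   right gives 0xC0000000 rather than the original 0x40000000.  */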
/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
   Then this yields nonzero if overflow occurred during the addition.
   Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
   Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
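/* Illustrative note (editorial addition, not in the original source):
   for 8-bit quantities, 0x70 + 0x70 wraps to 0xE0.  A and B are both
   positive while SUM is negative, so ~(a ^ b) has its sign bit set and
   so does (a ^ sum); the conjunction is negative and the macro yields
   nonzero, flagging the overflow.  */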
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     HOST_WIDE_INT low, hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     HOST_WIDE_INT *low, *hi;
{
  *low = words[0] | words[1] * BASE;
  *hi = words[2] | words[3] * BASE;
}
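/* Illustrative note (editorial addition, not in the original source):
   with HOST_BITS_PER_WIDE_INT == 32, BASE is 1 << 16 and

	encode (words, 0x9ABCDEF0, 0x12345678);

   stores words[0] = 0xDEF0, words[1] = 0x9ABC, words[2] = 0x5678 and
   words[3] = 0x1234; decode (words, &low, &hi) then recovers the
   original pair, since each word holds one base-65536 digit.  */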
/* Make the integer constant T valid for its type
   by setting to 0 or 1 all the bits in the constant
   that don't belong in the type.
   Yield 1 if a signed overflow occurs, 0 otherwise.
   If OVERFLOW is nonzero, a signed overflow has already occurred
   in calculating T, so propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  HOST_WIDE_INT low, high;
  register int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
			 overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      TREE_INT_CST_HIGH (t)
	&= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? (TREE_INT_CST_HIGH (t)
	     & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	{
	  TREE_INT_CST_HIGH (t)
	    |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Yield nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
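/* Illustrative note (editorial addition, not in the original source):
   for a signed 8-bit type, a constant with low part 0x1FF is first
   truncated to 0xFF; its sign bit (1 << 7) is then set, so sign
   extension turns the value into -1, and the comparison against the
   original words makes the function return 1 to signal overflow.  */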
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT l, h;

  l = l1 + l2;
  h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);

  *lv = l;
  *hv = h;
  return overflow_sum_sign (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     HOST_WIDE_INT l1, h1;
     HOST_WIDE_INT *lv, *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = - l1;
      *hv = ~ h1;
      return 0;
    }
}
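/* Illustrative note (editorial addition, not in the original source):
   negation overflows only for the most negative doubleword.  There
   L1 == 0 and H1 has just its sign bit set, so *HV == -H1 == H1 and
   (*hv & h1) < 0 yields 1; every other value negates exactly.  */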
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  HOST_WIDE_INT toplow, tophigh, neglow, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
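/* Illustrative note (editorial addition, not in the original source):
   with 32-bit HOST_WIDE_INT the loop above is schoolbook multiplication
   in base 65536: 4 digits times 4 digits gives up to 8 digits, of which
   prod[0..3] form the doubleword result and prod[4..7] feed only the
   signed-overflow check that follows.  */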
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

  count = (unsigned HOST_WIDE_INT) count & prec;

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
      *lv = (unsigned HOST_WIDE_INT) l1 << count;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  count = (unsigned HOST_WIDE_INT) count % prec;

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
	     | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
      *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
	     | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT arg1[4];
  register int i;
  register int carry;

  if (count < 0)
    {
      rrotate_double (l1, h1, - count, prec, lv, hv);
      return;
    }

  encode (arg1, l1, h1);

  if (count > prec)
    count = prec;

  carry = arg1[4 - 1] >> 16 - 1;
  while (count > 0)
    {
      for (i = 0; i < 4; i++)
	{
	  carry += arg1[i] << 1;
	  arg1[i] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      count--;
    }

  decode (arg1, lv, hv);
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
{
  HOST_WIDE_INT arg1[4];
  register int i;
  register int carry;

  encode (arg1, l1, h1);

  if (count > prec)
    count = prec;

  carry = arg1[0] & 1;
  while (count > 0)
    {
      for (i = 4 - 1; i >= 0; i--)
	{
	  carry *= BASE;
	  carry += arg1[i];
	  arg1[i] = LOWPART (carry >> 1);
	}
      count--;
    }

  decode (arg1, lv, hv);
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  register int carry = 0;
  HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if ((hden == 0) && (lden == 0))
    abort ();

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);	/* to zero 9th element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / (unsigned HOST_WIDE_INT) lden;
	  carry = work % (unsigned HOST_WIDE_INT) lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int quo_est, scale, num_hi_sig, den_hi_sig;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1; ; i--)
	if (den[i] != 0) {
	  den_hi_sig = i;
	  break;
	}

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1) {		/* scale divisor and dividend */
	carry = 0;
	for (i = 0; i <= 4 - 1; i++) {
	  work = (num[i] * scale) + carry;
	  num[i] = LOWPART (work);
	  carry = HIGHPART (work);
	} num[4] = carry;
	carry = 0;
	for (i = 0; i <= 4 - 1; i++) {
	  work = (den[i] * scale) + carry;
	  den[i] = LOWPART (work);
	  carry = HIGHPART (work);
	  if (den[i] != 0) den_hi_sig = i;
	}
      }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
	/* guess the next quotient digit, quo_est, by dividing the first
	   two remaining dividend digits by the high order quotient digit.
	   quo_est is never low and is at most 2 high.  */
	unsigned HOST_WIDE_INT tmp;

	num_hi_sig = i + den_hi_sig + 1;
	work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	if (num[num_hi_sig] != den[den_hi_sig])
	  quo_est = work / den[den_hi_sig];
	else
	  quo_est = BASE - 1;

	/* refine quo_est so it's usually correct, and at most one high.  */
	tmp = work - quo_est * den[den_hi_sig];
	if (tmp < BASE
	    && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
	  quo_est--;

	/* Try QUO_EST as the quotient digit, by multiplying the
	   divisor by QUO_EST and subtracting from the remaining dividend.
	   Keep in mind that QUO_EST is the I - 1st digit.  */

	carry = 0;
	for (j = 0; j <= den_hi_sig; j++)
	  {
	    work = quo_est * den[j] + carry;
	    carry = HIGHPART (work);
	    work = num[i + j] - LOWPART (work);
	    num[i + j] = LOWPART (work);
	    carry += HIGHPART (work) != 0;
	  }

	/* if quo_est was high by one, then num[i] went negative and
	   we need to correct things.  */

	if (num[num_hi_sig] < carry)
	  {
	    quo_est--;
	    carry = 0;		/* add divisor back in */
	    for (j = 0; j <= den_hi_sig; j++)
	      {
		work = num[i + j] + den[j] + carry;
		carry = HIGHPART (work);
		num[i + j] = LOWPART (work);
	      }
	    num[num_hi_sig] += carry;
	  }

	/* store the quotient digit.  */
	quo[i] = quo_est;
      }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
	HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;

	/* get absolute values */
	if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);

	/* if (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);
	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& ((unsigned HOST_WIDE_INT) labs_den
		    < (unsigned HOST_WIDE_INT) ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
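/* Illustrative note (editorial addition, not in the original source):
   dividing -7 by 3 gives quotient -2, remainder -1 for TRUNC_DIV_EXPR;
   FLOOR_DIV_EXPR adjusts to quotient -3, remainder 2; CEIL_DIV_EXPR
   keeps quotient -2; and ROUND_DIV_EXPR also keeps -2, because
   2 * |rem| = 2 does not exceed |den| = 3.  */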
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}
#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } big_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } little_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && u.big_endian.mantissa1 == 0
	      && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && u.little_endian.mantissa1 == 0
	      && u.little_endian.mantissa2 == 0);
    }
}
/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } big_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } little_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && (u.big_endian.mantissa1 != 0
		  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && (u.little_endian.mantissa1 != 0
		  || u.little_endian.mantissa2 != 0));
    }
}
/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } big_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } little_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}

#endif /* Target not IEEE */
#endif /* no REAL_ARITHMETIC */
/* Split a tree IN into a constant and a variable part
   that could be combined with CODE to make IN.
   CODE must be a commutative arithmetic operation.
   Store the constant part into *CONP and the variable in *VARP.
   Return 1 if this was done; zero means the tree IN did not decompose
   this way.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
   Therefore, we must tell the caller whether the variable part
   was subtracted.  We do this by storing 1 or -1 into *VARSIGNP.
   The value stored is the coefficient for the variable term.
   The constant term we return should always be added;
   we negate it if necessary.  */

static int
split_tree (in, code, varp, conp, varsignp)
     tree in;
     enum tree_code code;
     tree *varp, *conp;
     int *varsignp;
{
  register tree outtype = TREE_TYPE (in);
  *varp = 0;
  *conp = 0;

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
	  || TREE_CODE (in) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (in))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (! FLOAT_TYPE_P (TREE_TYPE (in))
	  /* We can associate addition and subtraction together
	     (even though the C standard doesn't say so)
	     for integers because the value is not affected.
	     For reals, the value might be affected, so we can't.  */
	  && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
	      || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
      if (code == INTEGER_CST)
	{
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
	  return 1;
	}
      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
	{
	  *conp = TREE_OPERAND (in, 1);
	  *varp = TREE_OPERAND (in, 0);
	  *varsignp = 1;
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  if (TREE_CODE (in) == MINUS_EXPR)
	    {
	      /* If operation is subtraction and constant is second,
		 must negate it to get an additive constant.
		 And this cannot be done unless it is a manifest constant.
		 It could also be the address of a static variable.
		 We cannot negate that, so give up.  */
	      if (TREE_CODE (*conp) == INTEGER_CST)
		/* Subtracting from integer_zero_node loses for long long.  */
		*conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
	      else
		return 0;
	    }
	  return 1;
	}
      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
	{
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
	  return 1;
	}
    }
  return 0;
}
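/* Illustrative note (editorial addition, not in the original source):
   splitting the tree for `i - 4' with CODE == PLUS_EXPR stores the
   variable `i' in *VARP with *VARSIGNP == 1 and the folded constant
   -4 in *CONP, so the caller can always add the constant part back
   in.  */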
/* Combine two constants ARG1 and ARG2 under operation CODE
   to produce a new constant.
   We assume ARG1 and ARG2 have the same data type,
   or at least are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
      register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
      HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
      HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
      HOST_WIDE_INT low, hi;
      HOST_WIDE_INT garbagel, garbageh;
      register tree t;
      int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
      int overflow = 0;

      switch (code)
	{
	case BIT_IOR_EXPR:
	  t = build_int_2 (int1l | int2l, int1h | int2h);
	  break;

	case BIT_XOR_EXPR:
	  t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
	  break;

	case BIT_AND_EXPR:
	  t = build_int_2 (int1l & int2l, int1h & int2h);
	  break;

	case BIT_ANDTC_EXPR:
	  t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
	  break;

	case RSHIFT_EXPR:
	  int2l = - int2l;
	case LSHIFT_EXPR:
	  /* It's unclear from the C standard whether shifts can overflow.
	     The following code ignores overflow; perhaps a C standard
	     interpretation ruling is needed.  */
	  lshift_double (int1l, int1h, int2l,
			 TYPE_PRECISION (TREE_TYPE (arg1)),
			 &low, &hi,
			 !uns);
	  t = build_int_2 (low, hi);
	  TREE_TYPE (t) = TREE_TYPE (arg1);
	  if (!notrunc)
	    force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
	  return t;

	case RROTATE_EXPR:
	  int2l = - int2l;
	case LROTATE_EXPR:
	  lrotate_double (int1l, int1h, int2l,
			  TYPE_PRECISION (TREE_TYPE (arg1)),
			  &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case PLUS_EXPR:
	  if (int1h == 0)
	    {
	      int2l += int1l;
	      if ((unsigned HOST_WIDE_INT) int2l < int1l)
		{
		  hi = int2h++;
		  overflow = int2h < hi;
		}
	      t = build_int_2 (int2l, int2h);
	      break;
	    }
	  if (int2h == 0)
	    {
	      int1l += int2l;
	      if ((unsigned HOST_WIDE_INT) int1l < int2l)
		{
		  hi = int1h++;
		  overflow = int1h < hi;
		}
	      t = build_int_2 (int1l, int1h);
	      break;
	    }
	  overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case MINUS_EXPR:
	  if (int2h == 0 && int2l == 0)
	    {
	      t = build_int_2 (int1l, int1h);
	      break;
	    }
	  neg_double (int2l, int2h, &low, &hi);
	  add_double (int1l, int1h, low, hi, &low, &hi);
	  overflow = overflow_sum_sign (hi, int2h, int1h);
	  t = build_int_2 (low, hi);
	  break;

	case MULT_EXPR:
	  overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case TRUNC_DIV_EXPR:
	case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
	case EXACT_DIV_EXPR:
	  /* This is a shortcut for a common special case.
	     It reduces the number of tree nodes generated
	     and saves time.  */
	  if (int2h == 0 && int2l > 0
	      && TREE_TYPE (arg1) == sizetype
	      && int1h == 0 && int1l >= 0)
	    {
	      if (code == CEIL_DIV_EXPR)
		int1l += int2l - 1;
	      return size_int (int1l / int2l);
	    }
	  /* ... fall through ... */
	case ROUND_DIV_EXPR:
	  if (int2h == 0 && int2l == 1)
	    {
	      t = build_int_2 (int1l, int1h);
	      break;
	    }
	  if (int1l == int2l && int1h == int2h)
	    {
	      if ((int1l | int1h) == 0)
		abort ();
	      t = build_int_2 (1, 0);
	      break;
	    }
	  overflow = div_and_round_double (code, uns,
					   int1l, int1h, int2l, int2h,
					   &low, &hi, &garbagel, &garbageh);
	  t = build_int_2 (low, hi);
	  break;

	case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
	case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
	  overflow = div_and_round_double (code, uns,
					   int1l, int1h, int2l, int2h,
					   &garbagel, &garbageh, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case MIN_EXPR:
	case MAX_EXPR:
	  if (uns)
	    low = (((unsigned HOST_WIDE_INT) int1h
		    < (unsigned HOST_WIDE_INT) int2h)
		   || (((unsigned HOST_WIDE_INT) int1h
			== (unsigned HOST_WIDE_INT) int2h)
		       && ((unsigned HOST_WIDE_INT) int1l
			   < (unsigned HOST_WIDE_INT) int2l)));
	  else
	    low = ((int1h < int2h)
		   || ((int1h == int2h)
		       && ((unsigned HOST_WIDE_INT) int1l
			   < (unsigned HOST_WIDE_INT) int2l)));
	  if (low == (code == MIN_EXPR))
	    t = build_int_2 (int1l, int1h);
	  else
	    t = build_int_2 (int2l, int2h);
	  break;

	default:
	  abort ();
	}

      TREE_TYPE (t) = TREE_TYPE (arg1);
      TREE_OVERFLOW (t)
	= ((notrunc ? !uns && overflow : force_fit_type (t, overflow))
	   | TREE_OVERFLOW (arg1)
	   | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				    | TREE_CONSTANT_OVERFLOW (arg1)
				    | TREE_CONSTANT_OVERFLOW (arg2));
      return t;
    }
1174 if (TREE_CODE (arg1
) == REAL_CST
)
1179 REAL_VALUE_TYPE value
;
1182 d1
= TREE_REAL_CST (arg1
);
1183 d2
= TREE_REAL_CST (arg2
);
1185 /* If either operand is a NaN, just return it. Otherwise, set up
1186 for floating-point trap; we return an overflow. */
1187 if (REAL_VALUE_ISNAN (d1
))
1189 else if (REAL_VALUE_ISNAN (d2
))
1191 else if (setjmp (float_error
))
1193 t
= copy_node (arg1
);
1198 set_float_handler (float_error
);
1200 #ifdef REAL_ARITHMETIC
1201 REAL_ARITHMETIC (value
, code
, d1
, d2
);
1218 #ifndef REAL_INFINITY
1227 value
= MIN (d1
, d2
);
1231 value
= MAX (d1
, d2
);
1237 #endif /* no REAL_ARITHMETIC */
1238 t
= build_real (TREE_TYPE (arg1
),
1239 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1
)), value
));
1241 set_float_handler (NULL_PTR
);
1244 = (force_fit_type (t
, overflow
)
1245 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1246 TREE_CONSTANT_OVERFLOW (t
)
1248 | TREE_CONSTANT_OVERFLOW (arg1
)
1249 | TREE_CONSTANT_OVERFLOW (arg2
);
1252 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex
	      (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
			    ? TRUNC_DIV_EXPR : RDIV_EXPR,
			    const_binop (PLUS_EXPR,
					 const_binop (MULT_EXPR, r1, r2,
						      notrunc),
					 const_binop (MULT_EXPR, i1, i2,
						      notrunc),
					 notrunc),
			    magsquared, notrunc),
	       const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
			    ? TRUNC_DIV_EXPR : RDIV_EXPR,
			    const_binop (MINUS_EXPR,
					 const_binop (MULT_EXPR, i1, r2,
						      notrunc),
					 const_binop (MULT_EXPR, r1, i2,
						      notrunc),
					 notrunc),
			    magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      TREE_TYPE (t) = TREE_TYPE (arg1);
      return t;
    }
  return 0;
}
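/* Illustrative note (editorial addition, not in the original source):
   the MULT_EXPR case above is the textbook identity
	(r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i
   and the RDIV_EXPR case divides both parts of the analogous product
   by the squared magnitude r2*r2 + i2*i2.  */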
/* Return an INTEGER_CST with value V and type from `sizetype'.  */

tree
size_int (number)
     unsigned int number;
{
  register tree t;
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];

  if (number < 2*HOST_BITS_PER_WIDE_INT + 1
      && size_table[number] != 0)
    return size_table[number];
  if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
    {
      push_obstacks_nochange ();
      /* Make this a permanent node.  */
      end_temporary_allocation ();
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype;
      size_table[number] = t;
      pop_obstacks ();
    }
  else
    {
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype;
    }
  return t;
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.
   CODE is a tree code.  Data type is taken from `sizetype'.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR
	  && TREE_INT_CST_LOW (arg0) == 0
	  && TREE_INT_CST_HIGH (arg0) == 0)
	return arg1;
      if (code == MINUS_EXPR
	  && TREE_INT_CST_LOW (arg1) == 0
	  && TREE_INT_CST_HIGH (arg1) == 0)
	return arg0;
      if (code == MULT_EXPR
	  && TREE_INT_CST_LOW (arg0) == 1
	  && TREE_INT_CST_HIGH (arg0) == 0)
	return arg1;
      /* Handle general case of two integer constants.  */
      return const_binop (code, arg0, arg1, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, sizetype, arg0, arg1));
}
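/* Illustrative note (editorial addition, not in the original source):
   size_binop (PLUS_EXPR, size_int (4), size_int (8)) takes the
   constant fast path through const_binop and yields an INTEGER_CST of
   value 12 with type `sizetype'; with non-constant operands it simply
   builds the expression and lets fold simplify it.  */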
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.  */
	  TREE_OVERFLOW (t)
	    = (TREE_OVERFLOW (arg1)
	       | force_fit_type (t,
				 (TREE_INT_CST_HIGH (arg1) < 0
				  & (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1))))));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
	  u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
	  l--;
	  u++;
#endif
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

#ifndef REAL_ARITHMETIC
	  {
	    HOST_WIDE_INT low, high;
	    HOST_WIDE_INT half_word
	      = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

	    if (x < 0)
	      x = -x;

	    high = (HOST_WIDE_INT) (x / half_word / half_word);
	    x -= (REAL_VALUE_TYPE) high * half_word * half_word;
	    if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
	      {
		low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
		low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
	      }
	    else
	      low = (HOST_WIDE_INT) x;
	    if (TREE_REAL_CST (arg1) < 0)
	      neg_double (low, high, &low, &high);
	    t = build_int_2 (low, high);
	  }
#else
	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
#endif
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    return arg1;
	  else if (setjmp (float_error))
	    {
	      overflow = 1;
	      t = copy_node (arg1);
	      goto got_it;
	    }
	  set_float_handler (float_error);

	  t = build_real (type, real_value_truncate (TYPE_MODE (type),
						     TREE_REAL_CST (arg1)));
	  set_float_handler (NULL_PTR);

	got_it:
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
/* Return an expr equal to X but certainly not valid as an lvalue.
   Also make sure it is not valid as a null pointer constant.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    {
      if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
	{
	  /* Use NOP_EXPR instead of NON_LVALUE_EXPR
	     so convert_for_assignment won't strip it.
	     This is so this 0 won't be treated as a null pointer constant.  */
	  result = build1 (NOP_EXPR, TREE_TYPE (x), x);
	  TREE_CONSTANT (result) = TREE_CONSTANT (x);
	  return result;
	}
      return x;
    }

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  */
  if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
    return ! only_const;

  if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
    return 0;

  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && TREE_CODE (arg0) == ADDR_EXPR
      && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
    return 1;

  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
      && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
    return 1;

  /* Detect when real constants are equal.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && TREE_CODE (arg0) == REAL_CST)
    return !bcmp ((char *) &TREE_REAL_CST (arg0),
		  (char *) &TREE_REAL_CST (arg1),
		  sizeof (REAL_VALUE_TYPE));

  if (only_const)
    return 0;

  if (arg0 == arg1)
    return 1;

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    return 0;
  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      return (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0));

    case 'r':
      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	}
    }

  return 0;
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg1, primother;
  unsigned correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    return 0;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
						   TREE_TYPE (primarg1)),
			  primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are non-zero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  /* ??? Disable this since the SAVE_EXPR might already be in use outside
     the expression.  There may be no way to make this work, but it needs
     to be looked at again for 2.6.  */
#if 0
  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }
#endif

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;
    }

  return 0;
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	}

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }
    }

  return arg;
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

static tree
omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
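/* Illustrative note (editorial addition, not in the original source):
   when fold reduces `f () * 0' it cannot simply drop the call, so
   omit_one_operand returns the COMPOUND_EXPR `(f (), 0)': the omitted
   operand is still evaluated for its side effects, but the value of
   the whole expression is the constant.  */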
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
					 && TREE_INT_CST_HIGH (arg) == 0, 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
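/* Illustrative note (editorial addition, not in the original source):
   the TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR cases above are De Morgan's
   laws: !(a && b) becomes !a || !b, and !(a || b) becomes !a && !b,
   with the negation pushed recursively into both operands.  */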
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (code, type, arg0, arg1)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
		      fold (build (code, type, left, right))));
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is non-zero.  */

static tree
make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
     tree inner;
     tree type;
     int bitsize, bitpos;
     int unsignedp;
{
  tree result = build (BIT_FIELD_REF, type, inner,
		       size_int (bitsize), size_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
				    &rmode, &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
			  TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
			  lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
			      TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
			      rvolatilep);
      if (rnmode == VOIDmode)
	return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      rbitpos -= rnbitpos;
      if (rnbitsize == rbitsize)
	return 0;
    }

  if (BYTES_BIG_ENDIAN)
    lbitpos = lnbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (lnbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     lnbitsize, lnbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     rnbitsize, rnbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
2386 /* Subroutine for fold_truthop: decode a field reference.
2388 If EXP is a comparison reference, we return the innermost reference.
2390 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2391 set to the starting bit number.
2393 If the innermost field can be completely contained in a mode-sized
2394 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2396 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2397 otherwise it is not changed.
2399 *PUNSIGNEDP is set to the signedness of the field.
2401 *PMASK is set to the mask used. This is either contained in a
2402 BIT_AND_EXPR or derived from the width of the field.
2404 Return 0 if this is not a component reference or is one that we can't
2405 do anything with. */
2408 decode_field_reference (exp
, pbitsize
, pbitpos
, pmode
, punsignedp
,
2411 int *pbitsize
, *pbitpos
;
2412 enum machine_mode
*pmode
;
2413 int *punsignedp
, *pvolatilep
;
2417 tree mask
, inner
, offset
;
2421 /* All the optimizations using this function assume integer fields.
2422 There are problems with FP fields since the type_for_size call
2423 below can fail for, e.g., XFmode. */
2424 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
2429 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
2431 and_mask
= TREE_OPERAND (exp
, 1);
2432 exp
= TREE_OPERAND (exp
, 0);
2433 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
2434 if (TREE_CODE (and_mask
) != INTEGER_CST
)
2438 if (TREE_CODE (exp
) != COMPONENT_REF
&& TREE_CODE (exp
) != ARRAY_REF
2439 && TREE_CODE (exp
) != BIT_FIELD_REF
)
2442 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
2443 punsignedp
, pvolatilep
);
2444 if (inner
== exp
|| *pbitsize
< 0 || offset
!= 0)
2447 /* Compute the mask to access the bitfield. */
2448 unsigned_type
= type_for_size (*pbitsize
, 1);
2449 precision
= TYPE_PRECISION (unsigned_type
);
2451 mask
= build_int_2 (~0, ~0);
2452 TREE_TYPE (mask
) = unsigned_type
;
2453 force_fit_type (mask
, 0);
2454 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2455 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2457 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2459 mask
= fold (build (BIT_AND_EXPR
, unsigned_type
,
2460 convert (unsigned_type
, and_mask
), mask
));
2466 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2470 all_ones_mask_p (mask
, size
)
2474 tree type
= TREE_TYPE (mask
);
2475 int precision
= TYPE_PRECISION (type
);
2478 tmask
= build_int_2 (~0, ~0);
2479 TREE_TYPE (tmask
) = signed_type (type
);
2480 force_fit_type (tmask
, 0);
2482 operand_equal_p (mask
,
2483 const_binop (RSHIFT_EXPR
,
2484 const_binop (LSHIFT_EXPR
, tmask
,
2485 size_int (precision
- size
), 0),
2486 size_int (precision
- size
), 0),
2490 /* Subroutine for fold_truthop: determine if an operand is simple enough
2491 to be evaluated unconditionally. */
2494 simple_operand_p (exp
)
2497 /* Strip any conversions that don't change the machine mode. */
2498 while ((TREE_CODE (exp
) == NOP_EXPR
2499 || TREE_CODE (exp
) == CONVERT_EXPR
)
2500 && (TYPE_MODE (TREE_TYPE (exp
))
2501 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
2502 exp
= TREE_OPERAND (exp
, 0);
2504 return (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c'
2505 || (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd'
2506 && ! TREE_ADDRESSABLE (exp
)
2507 && ! TREE_THIS_VOLATILE (exp
)
2508 && ! DECL_NONLOCAL (exp
)
2509 /* Don't regard global variables as simple. They may be
2510 allocated in ways unknown to the compiler (shared memory,
2511 #pragma weak, etc). */
2512 && ! TREE_PUBLIC (exp
)
2513 && ! DECL_EXTERNAL (exp
)
2514 /* Loading a static variable is unduly expensive, but global
2515 registers aren't expensive. */
2516 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
2519 /* Subroutine for fold_truthop: try to optimize a range test.
2521 For example, "i >= 2 && i =< 9" can be done as "(unsigned) (i - 2) <= 7".
2523 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2524 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2525 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2528 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2529 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2530 larger than HI_CST (they may be equal).
2532 We return the simplified tree or 0 if no optimization is possible. */
2535 range_test (jcode
, type
, lo_code
, hi_code
, var
, lo_cst
, hi_cst
)
2536 enum tree_code jcode
, lo_code
, hi_code
;
2537 tree type
, var
, lo_cst
, hi_cst
;
2540 enum tree_code rcode
;
2542 /* See if this is a range test and normalize the constant terms. */
2544 if (jcode
== TRUTH_AND_EXPR
)
2549 /* See if we have VAR != CST && VAR != CST+1. */
2550 if (! (hi_code
== NE_EXPR
2551 && TREE_INT_CST_LOW (hi_cst
) - TREE_INT_CST_LOW (lo_cst
) == 1
2552 && tree_int_cst_equal (integer_one_node
,
2553 const_binop (MINUS_EXPR
,
2554 hi_cst
, lo_cst
, 0))))
2562 if (hi_code
== LT_EXPR
)
2563 hi_cst
= const_binop (MINUS_EXPR
, hi_cst
, integer_one_node
, 0);
2564 else if (hi_code
!= LE_EXPR
)
2567 if (lo_code
== GT_EXPR
)
2568 lo_cst
= const_binop (PLUS_EXPR
, lo_cst
, integer_one_node
, 0);
2570 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2583 /* See if we have VAR == CST || VAR == CST+1. */
2584 if (! (hi_code
== EQ_EXPR
2585 && TREE_INT_CST_LOW (hi_cst
) - TREE_INT_CST_LOW (lo_cst
) == 1
2586 && tree_int_cst_equal (integer_one_node
,
2587 const_binop (MINUS_EXPR
,
2588 hi_cst
, lo_cst
, 0))))
2596 if (hi_code
== GE_EXPR
)
2597 hi_cst
= const_binop (MINUS_EXPR
, hi_cst
, integer_one_node
, 0);
2598 else if (hi_code
!= GT_EXPR
)
2601 if (lo_code
== LE_EXPR
)
2602 lo_cst
= const_binop (PLUS_EXPR
, lo_cst
, integer_one_node
, 0);
2604 /* We now have VAR < LO_CST || VAR > HI_CST. */
2613 /* When normalizing, it is possible to both increment the smaller constant
2614 and decrement the larger constant. See if they are still ordered. */
2615 if (tree_int_cst_lt (hi_cst
, lo_cst
))
2618 /* Fail if VAR isn't an integer. */
2619 utype
= TREE_TYPE (var
);
2620 if (! INTEGRAL_TYPE_P (utype
))
2623 /* The range test is invalid if subtracting the two constants results
2624 in overflow. This can happen in traditional mode. */
2625 if (! int_fits_type_p (hi_cst
, TREE_TYPE (var
))
2626 || ! int_fits_type_p (lo_cst
, TREE_TYPE (var
)))
2629 if (! TREE_UNSIGNED (utype
))
2631 utype
= unsigned_type (utype
);
2632 var
= convert (utype
, var
);
2633 lo_cst
= convert (utype
, lo_cst
);
2634 hi_cst
= convert (utype
, hi_cst
);
2637 return fold (convert (type
,
2638 build (rcode
, utype
,
2639 build (MINUS_EXPR
, utype
, var
, lo_cst
),
2640 const_binop (MINUS_EXPR
, hi_cst
, lo_cst
, 0))));
2643 /* Find ways of folding logical expressions of LHS and RHS:
2644 Try to merge two comparisons to the same innermost item.
2645 Look for range tests like "ch >= '0' && ch <= '9'".
2646 Look for combinations of simple terms on machines with expensive branches
2647 and evaluate the RHS unconditionally.
2649 For example, if we have p->a == 2 && p->b == 4 and we can make an
2650 object large enough to span both A and B, we can do this with a comparison
2651 against the object ANDed with the a mask.
2653 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2654 operations to do this with one comparison.
2656 We check for both normal comparisons and the BIT_AND_EXPRs made this by
2657 function and the one above.
2659 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2660 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2662 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2665 We return the simplified tree or 0 if no optimization is possible. */
2668 fold_truthop (code
, truth_type
, lhs
, rhs
)
2669 enum tree_code code
;
2670 tree truth_type
, lhs
, rhs
;
2672 /* If this is the "or" of two comparisons, we can do something if we
2673 the comparisons are NE_EXPR. If this is the "and", we can do something
2674 if the comparisons are EQ_EXPR. I.e.,
2675 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2677 WANTED_CODE is this operation code. For single bit fields, we can
2678 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2679 comparison for one-bit fields. */
2681 enum tree_code wanted_code
;
2682 enum tree_code lcode
, rcode
;
2683 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
2684 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
2685 int ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
2686 int rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
2687 int xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
2688 int lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
2689 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
2690 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
2691 enum machine_mode lnmode
, rnmode
;
2692 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
2693 tree l_const
, r_const
;
2695 int first_bit
, end_bit
;
2698 /* Start by getting the comparison codes and seeing if this looks like
2699 a range test. Fail if anything is volatile. If one operand is a
2700 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2703 if (TREE_SIDE_EFFECTS (lhs
)
2704 || TREE_SIDE_EFFECTS (rhs
))
2707 lcode
= TREE_CODE (lhs
);
2708 rcode
= TREE_CODE (rhs
);
2710 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
2711 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
2713 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
2714 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
2716 if (TREE_CODE_CLASS (lcode
) != '<'
2717 || TREE_CODE_CLASS (rcode
) != '<')
2720 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
2721 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
2723 ll_arg
= TREE_OPERAND (lhs
, 0);
2724 lr_arg
= TREE_OPERAND (lhs
, 1);
2725 rl_arg
= TREE_OPERAND (rhs
, 0);
2726 rr_arg
= TREE_OPERAND (rhs
, 1);
2728 if (TREE_CODE (lr_arg
) == INTEGER_CST
2729 && TREE_CODE (rr_arg
) == INTEGER_CST
2730 && operand_equal_p (ll_arg
, rl_arg
, 0))
2732 if (tree_int_cst_lt (lr_arg
, rr_arg
))
2733 result
= range_test (code
, truth_type
, lcode
, rcode
,
2734 ll_arg
, lr_arg
, rr_arg
);
2736 result
= range_test (code
, truth_type
, rcode
, lcode
,
2737 ll_arg
, rr_arg
, lr_arg
);
2739 /* If this isn't a range test, it also isn't a comparison that
2740 can be merged. However, it wins to evaluate the RHS unconditionally
2741 on machines with expensive branches. */
2743 if (result
== 0 && BRANCH_COST
>= 2)
2745 if (TREE_CODE (ll_arg
) != VAR_DECL
2746 && TREE_CODE (ll_arg
) != PARM_DECL
)
2748 /* Avoid evaluating the variable part twice. */
2749 ll_arg
= save_expr (ll_arg
);
2750 lhs
= build (lcode
, TREE_TYPE (lhs
), ll_arg
, lr_arg
);
2751 rhs
= build (rcode
, TREE_TYPE (rhs
), ll_arg
, rr_arg
);
2753 return build (code
, truth_type
, lhs
, rhs
);
2758 /* If the RHS can be evaluated unconditionally and its operands are
2759 simple, it wins to evaluate the RHS unconditionally on machines
2760 with expensive branches. In this case, this isn't a comparison
2761 that can be merged. */
2763 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2764 are with zero (tmw). */
2766 if (BRANCH_COST
>= 2
2767 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
2768 && simple_operand_p (rl_arg
)
2769 && simple_operand_p (rr_arg
))
2770 return build (code
, truth_type
, lhs
, rhs
);
2772 /* See if the comparisons can be merged. Then get all the parameters for
2775 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
2776 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
2780 ll_inner
= decode_field_reference (ll_arg
,
2781 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
2782 &ll_unsignedp
, &volatilep
, &ll_mask
);
2783 lr_inner
= decode_field_reference (lr_arg
,
2784 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
2785 &lr_unsignedp
, &volatilep
, &lr_mask
);
2786 rl_inner
= decode_field_reference (rl_arg
,
2787 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
2788 &rl_unsignedp
, &volatilep
, &rl_mask
);
2789 rr_inner
= decode_field_reference (rr_arg
,
2790 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
2791 &rr_unsignedp
, &volatilep
, &rr_mask
);
2793 /* It must be true that the inner operation on the lhs of each
2794 comparison must be the same if we are to be able to do anything.
2795 Then see if we have constants. If not, the same must be true for
2797 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
2798 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
2801 if (TREE_CODE (lr_arg
) == INTEGER_CST
2802 && TREE_CODE (rr_arg
) == INTEGER_CST
)
2803 l_const
= lr_arg
, r_const
= rr_arg
;
2804 else if (lr_inner
== 0 || rr_inner
== 0
2805 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
2808 l_const
= r_const
= 0;
2810 /* If either comparison code is not correct for our logical operation,
2811 fail. However, we can convert a one-bit comparison against zero into
2812 the opposite comparison against that bit being set in the field. */
2814 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
2815 if (lcode
!= wanted_code
)
2817 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
2823 if (rcode
!= wanted_code
)
2825 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
2831 /* See if we can find a mode that contains both fields being compared on
2832 the left. If we can't, fail. Otherwise, update all constants and masks
2833 to be relative to a field of that size. */
2834 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
2835 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
2836 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
2837 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
2839 if (lnmode
== VOIDmode
)
2842 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
2843 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
2844 type
= type_for_size (lnbitsize
, 1);
2845 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
2847 if (BYTES_BIG_ENDIAN
)
2849 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
2850 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
2853 ll_mask
= const_binop (LSHIFT_EXPR
, convert (type
, ll_mask
),
2854 size_int (xll_bitpos
), 0);
2855 rl_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rl_mask
),
2856 size_int (xrl_bitpos
), 0);
2858 /* Make sure the constants are interpreted as unsigned, so we
2859 don't have sign bits outside the range of their type. */
2863 l_const
= convert (unsigned_type (TREE_TYPE (l_const
)), l_const
);
2864 l_const
= const_binop (LSHIFT_EXPR
, convert (type
, l_const
),
2865 size_int (xll_bitpos
), 0);
2869 r_const
= convert (unsigned_type (TREE_TYPE (r_const
)), r_const
);
2870 r_const
= const_binop (LSHIFT_EXPR
, convert (type
, r_const
),
2871 size_int (xrl_bitpos
), 0);
2874 /* If the right sides are not constant, do the same for it. Also,
2875 disallow this optimization if a size or signedness mismatch occurs
2876 between the left and right sides. */
2879 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
2880 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
2881 /* Make sure the two fields on the right
2882 correspond to the left without being swapped. */
2883 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
2886 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
2887 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
2888 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
2889 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
2891 if (rnmode
== VOIDmode
)
2894 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
2895 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
2896 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
2898 if (BYTES_BIG_ENDIAN
)
2900 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
2901 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
2904 lr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, lr_mask
),
2905 size_int (xlr_bitpos
), 0);
2906 rr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rr_mask
),
2907 size_int (xrr_bitpos
), 0);
2909 /* Make a mask that corresponds to both fields being compared.
2910 Do this for both items being compared. If the masks agree,
2911 we can do this by masking both and comparing the masked
2913 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
2914 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
2915 if (operand_equal_p (ll_mask
, lr_mask
, 0) && lnbitsize
== rnbitsize
)
2917 lhs
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
2918 ll_unsignedp
|| rl_unsignedp
);
2919 rhs
= make_bit_field_ref (lr_inner
, type
, rnbitsize
, rnbitpos
,
2920 lr_unsignedp
|| rr_unsignedp
);
2921 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
2923 lhs
= build (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
2924 rhs
= build (BIT_AND_EXPR
, type
, rhs
, ll_mask
);
2926 return build (wanted_code
, truth_type
, lhs
, rhs
);
2929 /* There is still another way we can do something: If both pairs of
2930 fields being compared are adjacent, we may be able to make a wider
2931 field containing them both. */
2932 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
2933 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
2934 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
2935 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
2936 return build (wanted_code
, truth_type
,
2937 make_bit_field_ref (ll_inner
, type
,
2938 ll_bitsize
+ rl_bitsize
,
2939 MIN (ll_bitpos
, rl_bitpos
),
2941 make_bit_field_ref (lr_inner
, type
,
2942 lr_bitsize
+ rr_bitsize
,
2943 MIN (lr_bitpos
, rr_bitpos
),
2949 /* Handle the case of comparisons with constants. If there is something in
2950 common between the masks, those bits of the constants must be the same.
2951 If not, the condition is always false. Test for this to avoid generating
2952 incorrect code below. */
2953 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
2954 if (! integer_zerop (result
)
2955 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
2956 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
2958 if (wanted_code
== NE_EXPR
)
2960 warning ("`or' of unmatched not-equal tests is always 1");
2961 return convert (truth_type
, integer_one_node
);
2965 warning ("`and' of mutually exclusive equal-tests is always zero");
2966 return convert (truth_type
, integer_zero_node
);
2970 /* Construct the expression we will return. First get the component
2971 reference we will make. Unless the mask is all ones the width of
2972 that field, perform the mask operation. Then compare with the
2974 result
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
2975 ll_unsignedp
|| rl_unsignedp
);
2977 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
2978 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
2979 result
= build (BIT_AND_EXPR
, type
, result
, ll_mask
);
2981 return build (wanted_code
, truth_type
, result
,
2982 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
2985 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
2986 S, a SAVE_EXPR, return the expression actually being evaluated. Note
2987 that we may sometimes modify the tree. */
2990 strip_compound_expr (t
, s
)
2994 tree type
= TREE_TYPE (t
);
2995 enum tree_code code
= TREE_CODE (t
);
2997 /* See if this is the COMPOUND_EXPR we want to eliminate. */
2998 if (code
== COMPOUND_EXPR
&& TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
2999 && TREE_OPERAND (TREE_OPERAND (t
, 0), 0) == s
)
3000 return TREE_OPERAND (t
, 1);
3002 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3003 don't bother handling any other types. */
3004 else if (code
== COND_EXPR
)
3006 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3007 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3008 TREE_OPERAND (t
, 2) = strip_compound_expr (TREE_OPERAND (t
, 2), s
);
3010 else if (TREE_CODE_CLASS (code
) == '1')
3011 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3012 else if (TREE_CODE_CLASS (code
) == '<'
3013 || TREE_CODE_CLASS (code
) == '2')
3015 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3016 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3022 /* Perform constant folding and related simplification of EXPR.
3023 The related simplifications include x*1 => x, x*0 => 0, etc.,
3024 and application of the associative law.
3025 NOP_EXPR conversions may be removed freely (as long as we
3026 are careful not to change the C type of the overall expression)
3027 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3028 but we can constant-fold them if they have constant operands. */
3034 register tree t
= expr
;
3035 tree t1
= NULL_TREE
;
3037 tree type
= TREE_TYPE (expr
);
3038 register tree arg0
, arg1
;
3039 register enum tree_code code
= TREE_CODE (t
);
3043 /* WINS will be nonzero when the switch is done
3044 if all operands are constant. */
3048 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3049 if (code
== RTL_EXPR
)
3052 /* Return right away if already constant. */
3053 if (TREE_CONSTANT (t
))
3055 if (code
== CONST_DECL
)
3056 return DECL_INITIAL (t
);
3060 kind
= TREE_CODE_CLASS (code
);
3061 if (code
== NOP_EXPR
|| code
== FLOAT_EXPR
|| code
== CONVERT_EXPR
)
3065 /* Special case for conversion ops that can have fixed point args. */
3066 arg0
= TREE_OPERAND (t
, 0);
3068 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3070 STRIP_TYPE_NOPS (arg0
);
3072 if (arg0
!= 0 && TREE_CODE (arg0
) == COMPLEX_CST
)
3073 subop
= TREE_REALPART (arg0
);
3077 if (subop
!= 0 && TREE_CODE (subop
) != INTEGER_CST
3078 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3079 && TREE_CODE (subop
) != REAL_CST
3080 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3082 /* Note that TREE_CONSTANT isn't enough:
3083 static var addresses are constant but we can't
3084 do arithmetic on them. */
3087 else if (kind
== 'e' || kind
== '<'
3088 || kind
== '1' || kind
== '2' || kind
== 'r')
3090 register int len
= tree_code_length
[(int) code
];
3092 for (i
= 0; i
< len
; i
++)
3094 tree op
= TREE_OPERAND (t
, i
);
3098 continue; /* Valid for CALL_EXPR, at least. */
3100 if (kind
== '<' || code
== RSHIFT_EXPR
)
3102 /* Signedness matters here. Perhaps we can refine this
3104 STRIP_TYPE_NOPS (op
);
3108 /* Strip any conversions that don't change the mode. */
3112 if (TREE_CODE (op
) == COMPLEX_CST
)
3113 subop
= TREE_REALPART (op
);
3117 if (TREE_CODE (subop
) != INTEGER_CST
3118 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3119 && TREE_CODE (subop
) != REAL_CST
3120 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3122 /* Note that TREE_CONSTANT isn't enough:
3123 static var addresses are constant but we can't
3124 do arithmetic on them. */
3134 /* If this is a commutative operation, and ARG0 is a constant, move it
3135 to ARG1 to reduce the number of tests below. */
3136 if ((code
== PLUS_EXPR
|| code
== MULT_EXPR
|| code
== MIN_EXPR
3137 || code
== MAX_EXPR
|| code
== BIT_IOR_EXPR
|| code
== BIT_XOR_EXPR
3138 || code
== BIT_AND_EXPR
)
3139 && (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
))
3141 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
3143 tem
= TREE_OPERAND (t
, 0); TREE_OPERAND (t
, 0) = TREE_OPERAND (t
, 1);
3144 TREE_OPERAND (t
, 1) = tem
;
3147 /* Now WINS is set as described above,
3148 ARG0 is the first operand of EXPR,
3149 and ARG1 is the second operand (if it has more than one operand).
3151 First check for cases where an arithmetic operation is applied to a
3152 compound, conditional, or comparison operation. Push the arithmetic
3153 operation inside the compound or conditional to see if any folding
3154 can then be done. Convert comparison to conditional for this purpose.
3155 The also optimizes non-constant cases that used to be done in
3158 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3159 one of the operands is a comparison and the other is a comparison, a
3160 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3161 code below would make the expression more complex. Change it to a
3162 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3163 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3165 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
3166 || code
== EQ_EXPR
|| code
== NE_EXPR
)
3167 && ((truth_value_p (TREE_CODE (arg0
))
3168 && (truth_value_p (TREE_CODE (arg1
))
3169 || (TREE_CODE (arg1
) == BIT_AND_EXPR
3170 && integer_onep (TREE_OPERAND (arg1
, 1)))))
3171 || (truth_value_p (TREE_CODE (arg1
))
3172 && (truth_value_p (TREE_CODE (arg0
))
3173 || (TREE_CODE (arg0
) == BIT_AND_EXPR
3174 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
3176 t
= fold (build (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
3177 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
3181 if (code
== EQ_EXPR
)
3182 t
= invert_truthvalue (t
);
3187 if (TREE_CODE_CLASS (code
) == '1')
3189 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
3190 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3191 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))));
3192 else if (TREE_CODE (arg0
) == COND_EXPR
)
3194 t
= fold (build (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3195 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))),
3196 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 2)))));
3198 /* If this was a conversion, and all we did was to move into
3199 inside the COND_EXPR, bring it back out. But leave it if
3200 it is a conversion from integer to integer and the
3201 result precision is no wider than a word since such a
3202 conversion is cheap and may be optimized away by combine,
3203 while it couldn't if it were outside the COND_EXPR. Then return
3204 so we don't get into an infinite recursion loop taking the
3205 conversion out and then back in. */
3207 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
3208 || code
== NON_LVALUE_EXPR
)
3209 && TREE_CODE (t
) == COND_EXPR
3210 && TREE_CODE (TREE_OPERAND (t
, 1)) == code
3211 && TREE_CODE (TREE_OPERAND (t
, 2)) == code
3212 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0))
3213 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 2), 0)))
3214 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3215 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)))
3216 && TYPE_PRECISION (TREE_TYPE (t
)) <= BITS_PER_WORD
))
3217 t
= build1 (code
, type
,
3219 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)),
3220 TREE_OPERAND (t
, 0),
3221 TREE_OPERAND (TREE_OPERAND (t
, 1), 0),
3222 TREE_OPERAND (TREE_OPERAND (t
, 2), 0)));
3225 else if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
3226 return fold (build (COND_EXPR
, type
, arg0
,
3227 fold (build1 (code
, type
, integer_one_node
)),
3228 fold (build1 (code
, type
, integer_zero_node
))));
3230 else if (TREE_CODE_CLASS (code
) == '2'
3231 || TREE_CODE_CLASS (code
) == '<')
3233 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
3234 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
3235 fold (build (code
, type
,
3236 arg0
, TREE_OPERAND (arg1
, 1))));
3237 else if (TREE_CODE (arg1
) == COND_EXPR
3238 || TREE_CODE_CLASS (TREE_CODE (arg1
)) == '<')
3240 tree test
, true_value
, false_value
;
3242 if (TREE_CODE (arg1
) == COND_EXPR
)
3244 test
= TREE_OPERAND (arg1
, 0);
3245 true_value
= TREE_OPERAND (arg1
, 1);
3246 false_value
= TREE_OPERAND (arg1
, 2);
3251 true_value
= integer_one_node
;
3252 false_value
= integer_zero_node
;
3255 /* If ARG0 is complex we want to make sure we only evaluate
3256 it once. Though this is only required if it is volatile, it
3257 might be more efficient even if it is not. However, if we
3258 succeed in folding one part to a constant, we do not need
3259 to make this SAVE_EXPR. Since we do this optimization
3260 primarily to see if we do end up with constant and this
3261 SAVE_EXPR interfers with later optimizations, suppressing
3262 it when we can is important. */
3264 if (TREE_CODE (arg0
) != SAVE_EXPR
3265 && ((TREE_CODE (arg0
) != VAR_DECL
3266 && TREE_CODE (arg0
) != PARM_DECL
)
3267 || TREE_SIDE_EFFECTS (arg0
)))
3269 tree lhs
= fold (build (code
, type
, arg0
, true_value
));
3270 tree rhs
= fold (build (code
, type
, arg0
, false_value
));
3272 if (TREE_CONSTANT (lhs
) || TREE_CONSTANT (rhs
))
3273 return fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
3275 arg0
= save_expr (arg0
);
3278 test
= fold (build (COND_EXPR
, type
, test
,
3279 fold (build (code
, type
, arg0
, true_value
)),
3280 fold (build (code
, type
, arg0
, false_value
))));
3281 if (TREE_CODE (arg0
) == SAVE_EXPR
)
3282 return build (COMPOUND_EXPR
, type
,
3283 convert (void_type_node
, arg0
),
3284 strip_compound_expr (test
, arg0
));
3286 return convert (type
, test
);
3289 else if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
3290 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3291 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
3292 else if (TREE_CODE (arg0
) == COND_EXPR
3293 || TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
3295 tree test
, true_value
, false_value
;
3297 if (TREE_CODE (arg0
) == COND_EXPR
)
3299 test
= TREE_OPERAND (arg0
, 0);
3300 true_value
= TREE_OPERAND (arg0
, 1);
3301 false_value
= TREE_OPERAND (arg0
, 2);
3306 true_value
= integer_one_node
;
3307 false_value
= integer_zero_node
;
3310 if (TREE_CODE (arg1
) != SAVE_EXPR
3311 && ((TREE_CODE (arg1
) != VAR_DECL
3312 && TREE_CODE (arg1
) != PARM_DECL
)
3313 || TREE_SIDE_EFFECTS (arg1
)))
3315 tree lhs
= fold (build (code
, type
, true_value
, arg1
));
3316 tree rhs
= fold (build (code
, type
, false_value
, arg1
));
3318 if (TREE_CONSTANT (lhs
) || TREE_CONSTANT (rhs
)
3319 || TREE_CONSTANT (arg1
))
3320 return fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
3322 arg1
= save_expr (arg1
);
3325 test
= fold (build (COND_EXPR
, type
, test
,
3326 fold (build (code
, type
, true_value
, arg1
)),
3327 fold (build (code
, type
, false_value
, arg1
))));
3328 if (TREE_CODE (arg1
) == SAVE_EXPR
)
3329 return build (COMPOUND_EXPR
, type
,
3330 convert (void_type_node
, arg1
),
3331 strip_compound_expr (test
, arg1
));
3333 return convert (type
, test
);
3336 else if (TREE_CODE_CLASS (code
) == '<'
3337 && TREE_CODE (arg0
) == COMPOUND_EXPR
)
3338 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3339 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
3340 else if (TREE_CODE_CLASS (code
) == '<'
3341 && TREE_CODE (arg1
) == COMPOUND_EXPR
)
3342 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
3343 fold (build (code
, type
, arg0
, TREE_OPERAND (arg1
, 1))));
3355 return fold (DECL_INITIAL (t
));
3360 case FIX_TRUNC_EXPR
:
3361 /* Other kinds of FIX are not handled properly by fold_convert. */
3363 if (TREE_TYPE (TREE_OPERAND (t
, 0)) == TREE_TYPE (t
))
3364 return TREE_OPERAND (t
, 0);
3366 /* Handle cases of two conversions in a row. */
3367 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
3368 || TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
)
3370 tree inside_type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3371 tree inter_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
3372 tree final_type
= TREE_TYPE (t
);
3373 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
3374 int inside_ptr
= POINTER_TYPE_P (inside_type
);
3375 int inside_float
= FLOAT_TYPE_P (inside_type
);
3376 int inside_prec
= TYPE_PRECISION (inside_type
);
3377 int inside_unsignedp
= TREE_UNSIGNED (inside_type
);
3378 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
3379 int inter_ptr
= POINTER_TYPE_P (inter_type
);
3380 int inter_float
= FLOAT_TYPE_P (inter_type
);
3381 int inter_prec
= TYPE_PRECISION (inter_type
);
3382 int inter_unsignedp
= TREE_UNSIGNED (inter_type
);
3383 int final_int
= INTEGRAL_TYPE_P (final_type
);
3384 int final_ptr
= POINTER_TYPE_P (final_type
);
3385 int final_float
= FLOAT_TYPE_P (final_type
);
3386 int final_prec
= TYPE_PRECISION (final_type
);
3387 int final_unsignedp
= TREE_UNSIGNED (final_type
);
3389 /* In addition to the cases of two conversions in a row
3390 handled below, if we are converting something to its own
3391 type via an object of identical or wider precision, neither
3392 conversion is needed. */
3393 if (inside_type
== final_type
3394 && ((inter_int
&& final_int
) || (inter_float
&& final_float
))
3395 && inter_prec
>= final_prec
)
3396 return TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
3398 /* Likewise, if the intermediate and final types are either both
3399 float or both integer, we don't need the middle conversion if
3400 it is wider than the final type and doesn't change the signedness
3401 (for integers). Avoid this if the final type is a pointer
3402 since then we sometimes need the inner conversion. */
3403 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
3404 || (inter_float
&& inside_float
))
3405 && inter_prec
>= inside_prec
3406 && (inter_float
|| inter_unsignedp
== inside_unsignedp
)
3408 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3410 /* Two conversions in a row are not needed unless:
3411 - some conversion is floating-point (overstrict for now), or
3412 - the intermediate type is narrower than both initial and
3414 - the intermediate type and innermost type differ in signedness,
3415 and the outermost type is wider than the intermediate, or
3416 - the initial type is a pointer type and the precisions of the
3417 intermediate and final types differ, or
3418 - the final type is a pointer type and the precisions of the
3419 initial and intermediate types differ. */
3420 if (! inside_float
&& ! inter_float
&& ! final_float
3421 && (inter_prec
> inside_prec
|| inter_prec
> final_prec
)
3422 && ! (inside_int
&& inter_int
3423 && inter_unsignedp
!= inside_unsignedp
3424 && inter_prec
< final_prec
)
3425 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
3426 == (final_unsignedp
&& final_prec
> inter_prec
))
3427 && ! (inside_ptr
&& inter_prec
!= final_prec
)
3428 && ! (final_ptr
&& inside_prec
!= inter_prec
))
3429 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3432 if (TREE_CODE (TREE_OPERAND (t
, 0)) == MODIFY_EXPR
3433 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t
, 0), 1))
3434 /* Detect assigning a bitfield. */
3435 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == COMPONENT_REF
3436 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 1))))
3438 /* Don't leave an assignment inside a conversion
3439 unless assigning a bitfield. */
3440 tree prev
= TREE_OPERAND (t
, 0);
3441 TREE_OPERAND (t
, 0) = TREE_OPERAND (prev
, 1);
3442 /* First do the assignment, then return converted constant. */
3443 t
= build (COMPOUND_EXPR
, TREE_TYPE (t
), prev
, fold (t
));
3449 TREE_CONSTANT (t
) = TREE_CONSTANT (arg0
);
3452 return fold_convert (t
, arg0
);
3454 #if 0 /* This loses on &"foo"[0]. */
3459 /* Fold an expression like: "foo"[2] */
3460 if (TREE_CODE (arg0
) == STRING_CST
3461 && TREE_CODE (arg1
) == INTEGER_CST
3462 && !TREE_INT_CST_HIGH (arg1
)
3463 && (i
= TREE_INT_CST_LOW (arg1
)) < TREE_STRING_LENGTH (arg0
))
3465 t
= build_int_2 (TREE_STRING_POINTER (arg0
)[i
], 0);
3466 TREE_TYPE (t
) = TREE_TYPE (TREE_TYPE (arg0
));
3467 force_fit_type (t
, 0);
3474 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3476 tree m
= purpose_member (arg1
, CONSTRUCTOR_ELTS (arg0
));
3483 TREE_CONSTANT (t
) = wins
;
3489 if (TREE_CODE (arg0
) == INTEGER_CST
)
3491 HOST_WIDE_INT low
, high
;
3492 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
3493 TREE_INT_CST_HIGH (arg0
),
3495 t
= build_int_2 (low
, high
);
3496 TREE_TYPE (t
) = type
;
3498 = (TREE_OVERFLOW (arg0
)
3499 | force_fit_type (t
, overflow
));
3500 TREE_CONSTANT_OVERFLOW (t
)
3501 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
3503 else if (TREE_CODE (arg0
) == REAL_CST
)
3504 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
3505 TREE_TYPE (t
) = type
;
3507 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
3508 return TREE_OPERAND (arg0
, 0);
3510 /* Convert - (a - b) to (b - a) for non-floating-point. */
3511 else if (TREE_CODE (arg0
) == MINUS_EXPR
&& ! FLOAT_TYPE_P (type
))
3512 return build (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 1),
3513 TREE_OPERAND (arg0
, 0));
3520 if (TREE_CODE (arg0
) == INTEGER_CST
)
3522 if (! TREE_UNSIGNED (type
)
3523 && TREE_INT_CST_HIGH (arg0
) < 0)
3525 HOST_WIDE_INT low
, high
;
3526 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
3527 TREE_INT_CST_HIGH (arg0
),
3529 t
= build_int_2 (low
, high
);
3530 TREE_TYPE (t
) = type
;
3532 = (TREE_OVERFLOW (arg0
)
3533 | force_fit_type (t
, overflow
));
3534 TREE_CONSTANT_OVERFLOW (t
)
3535 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
3538 else if (TREE_CODE (arg0
) == REAL_CST
)
3540 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
3541 t
= build_real (type
,
3542 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
3544 TREE_TYPE (t
) = type
;
3546 else if (TREE_CODE (arg0
) == ABS_EXPR
|| TREE_CODE (arg0
) == NEGATE_EXPR
)
3547 return build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
3551 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
3553 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
3554 return build (COMPLEX_EXPR
, TREE_TYPE (arg0
),
3555 TREE_OPERAND (arg0
, 0),
3556 fold (build1 (NEGATE_EXPR
,
3557 TREE_TYPE (TREE_TYPE (arg0
)),
3558 TREE_OPERAND (arg0
, 1))));
3559 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
3560 return build_complex (TREE_OPERAND (arg0
, 0),
3561 fold (build1 (NEGATE_EXPR
,
3562 TREE_TYPE (TREE_TYPE (arg0
)),
3563 TREE_OPERAND (arg0
, 1))));
3564 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
3565 return fold (build (TREE_CODE (arg0
), type
,
3566 fold (build1 (CONJ_EXPR
, type
,
3567 TREE_OPERAND (arg0
, 0))),
3568 fold (build1 (CONJ_EXPR
,
3569 type
, TREE_OPERAND (arg0
, 1)))));
3570 else if (TREE_CODE (arg0
) == CONJ_EXPR
)
3571 return TREE_OPERAND (arg0
, 0);
3577 if (TREE_CODE (arg0
) == INTEGER_CST
)
3578 t
= build_int_2 (~ TREE_INT_CST_LOW (arg0
),
3579 ~ TREE_INT_CST_HIGH (arg0
));
3580 TREE_TYPE (t
) = type
;
3581 force_fit_type (t
, 0);
3582 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg0
);
3583 TREE_CONSTANT_OVERFLOW (t
) = TREE_CONSTANT_OVERFLOW (arg0
);
3585 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
3586 return TREE_OPERAND (arg0
, 0);
3590 /* A + (-B) -> A - B */
3591 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
3592 return fold (build (MINUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
3593 else if (! FLOAT_TYPE_P (type
))
3595 if (integer_zerop (arg1
))
3596 return non_lvalue (convert (type
, arg0
));
3598 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3599 with a constant, and the two constants have no bits in common,
3600 we should treat this as a BIT_IOR_EXPR since this may produce more
3602 if (TREE_CODE (arg0
) == BIT_AND_EXPR
3603 && TREE_CODE (arg1
) == BIT_AND_EXPR
3604 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3605 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3606 && integer_zerop (const_binop (BIT_AND_EXPR
,
3607 TREE_OPERAND (arg0
, 1),
3608 TREE_OPERAND (arg1
, 1), 0)))
3610 code
= BIT_IOR_EXPR
;
3614 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3615 about the case where C is a constant, just try one of the
3616 four possibilities. */
3618 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
3619 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3620 TREE_OPERAND (arg1
, 1), 0))
3621 return fold (build (MULT_EXPR
, type
,
3622 fold (build (PLUS_EXPR
, type
,
3623 TREE_OPERAND (arg0
, 0),
3624 TREE_OPERAND (arg1
, 0))),
3625 TREE_OPERAND (arg0
, 1)));
3627 /* In IEEE floating point, x+0 may not equal x. */
3628 else if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3630 && real_zerop (arg1
))
3631 return non_lvalue (convert (type
, arg0
));
3633 /* In most languages, can't associate operations on floats
3634 through parentheses. Rather than remember where the parentheses
3635 were, we don't associate floats at all. It shouldn't matter much.
3636 However, associating multiplications is only very slightly
3637 inaccurate, so do that if -ffast-math is specified. */
3638 if (FLOAT_TYPE_P (type
)
3639 && ! (flag_fast_math
&& code
== MULT_EXPR
))
3642 /* The varsign == -1 cases happen only for addition and subtraction.
3643 It says that the arg that was split was really CON minus VAR.
3644 The rest of the code applies to all associative operations. */
3650 if (split_tree (arg0
, code
, &var
, &con
, &varsign
))
3654 /* EXPR is (CON-VAR) +- ARG1. */
3655 /* If it is + and VAR==ARG1, return just CONST. */
3656 if (code
== PLUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
3657 return convert (TREE_TYPE (t
), con
);
3659 /* If ARG0 is a constant, don't change things around;
3660 instead keep all the constant computations together. */
3662 if (TREE_CONSTANT (arg0
))
3665 /* Otherwise return (CON +- ARG1) - VAR. */
3666 t
= build (MINUS_EXPR
, type
,
3667 fold (build (code
, type
, con
, arg1
)), var
);
3671 /* EXPR is (VAR+CON) +- ARG1. */
3672 /* If it is - and VAR==ARG1, return just CONST. */
3673 if (code
== MINUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
3674 return convert (TREE_TYPE (t
), con
);
3676 /* If ARG0 is a constant, don't change things around;
3677 instead keep all the constant computations together. */
3679 if (TREE_CONSTANT (arg0
))
3682 /* Otherwise return VAR +- (ARG1 +- CON). */
3683 tem
= fold (build (code
, type
, arg1
, con
));
3684 t
= build (code
, type
, var
, tem
);
3686 if (integer_zerop (tem
)
3687 && (code
== PLUS_EXPR
|| code
== MINUS_EXPR
))
3688 return convert (type
, var
);
3689 /* If we have x +/- (c - d) [c an explicit integer]
3690 change it to x -/+ (d - c) since if d is relocatable
3691 then the latter can be a single immediate insn
3692 and the former cannot. */
3693 if (TREE_CODE (tem
) == MINUS_EXPR
3694 && TREE_CODE (TREE_OPERAND (tem
, 0)) == INTEGER_CST
)
3696 tree tem1
= TREE_OPERAND (tem
, 1);
3697 TREE_OPERAND (tem
, 1) = TREE_OPERAND (tem
, 0);
3698 TREE_OPERAND (tem
, 0) = tem1
;
3700 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
3706 if (split_tree (arg1
, code
, &var
, &con
, &varsign
))
3708 if (TREE_CONSTANT (arg1
))
3713 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
3715 /* EXPR is ARG0 +- (CON +- VAR). */
3716 if (TREE_CODE (t
) == MINUS_EXPR
3717 && operand_equal_p (var
, arg0
, 0))
3719 /* If VAR and ARG0 cancel, return just CON or -CON. */
3720 if (code
== PLUS_EXPR
)
3721 return convert (TREE_TYPE (t
), con
);
3722 return fold (build1 (NEGATE_EXPR
, TREE_TYPE (t
),
3723 convert (TREE_TYPE (t
), con
)));
3726 t
= build (TREE_CODE (t
), type
,
3727 fold (build (code
, TREE_TYPE (t
), arg0
, con
)), var
);
3729 if (integer_zerop (TREE_OPERAND (t
, 0))
3730 && TREE_CODE (t
) == PLUS_EXPR
)
3731 return convert (TREE_TYPE (t
), var
);
3736 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3737 if (TREE_CODE (arg1
) == REAL_CST
)
3739 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3741 t1
= const_binop (code
, arg0
, arg1
, 0);
3742 if (t1
!= NULL_TREE
)
3744 /* The return value should always have
3745 the same type as the original expression. */
3746 TREE_TYPE (t1
) = TREE_TYPE (t
);
3752 if (! FLOAT_TYPE_P (type
))
3754 if (! wins
&& integer_zerop (arg0
))
3755 return build1 (NEGATE_EXPR
, type
, arg1
);
3756 if (integer_zerop (arg1
))
3757 return non_lvalue (convert (type
, arg0
));
3759 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3760 about the case where C is a constant, just try one of the
3761 four possibilities. */
3763 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
3764 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3765 TREE_OPERAND (arg1
, 1), 0))
3766 return fold (build (MULT_EXPR
, type
,
3767 fold (build (MINUS_EXPR
, type
,
3768 TREE_OPERAND (arg0
, 0),
3769 TREE_OPERAND (arg1
, 0))),
3770 TREE_OPERAND (arg0
, 1)));
3772 /* Convert A - (-B) to A + B. */
3773 else if (TREE_CODE (arg1
) == NEGATE_EXPR
)
3774 return fold (build (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
3776 else if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3779 /* Except with IEEE floating point, 0-x equals -x. */
3780 if (! wins
&& real_zerop (arg0
))
3781 return build1 (NEGATE_EXPR
, type
, arg1
);
3782 /* Except with IEEE floating point, x-0 equals x. */
3783 if (real_zerop (arg1
))
3784 return non_lvalue (convert (type
, arg0
));
3787 /* Fold &x - &x. This can happen from &x.foo - &x.
3788 This is unsafe for certain floats even in non-IEEE formats.
3789 In IEEE, it is unsafe because it does wrong for NaNs.
3790 Also note that operand_equal_p is always false if an operand
3793 if ((! FLOAT_TYPE_P (type
) || flag_fast_math
)
3794 && operand_equal_p (arg0
, arg1
, 0))
3795 return convert (type
, integer_zero_node
);
3800 if (! FLOAT_TYPE_P (type
))
3802 if (integer_zerop (arg1
))
3803 return omit_one_operand (type
, arg1
, arg0
);
3804 if (integer_onep (arg1
))
3805 return non_lvalue (convert (type
, arg0
));
3807 /* ((A / C) * C) is A if the division is an
3808 EXACT_DIV_EXPR. Since C is normally a constant,
3809 just check for one of the four possibilities. */
3811 if (TREE_CODE (arg0
) == EXACT_DIV_EXPR
3812 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
3813 return TREE_OPERAND (arg0
, 0);
3815 /* (a * (1 << b)) is (a << b) */
3816 if (TREE_CODE (arg1
) == LSHIFT_EXPR
3817 && integer_onep (TREE_OPERAND (arg1
, 0)))
3818 return fold (build (LSHIFT_EXPR
, type
, arg0
,
3819 TREE_OPERAND (arg1
, 1)));
3820 if (TREE_CODE (arg0
) == LSHIFT_EXPR
3821 && integer_onep (TREE_OPERAND (arg0
, 0)))
3822 return fold (build (LSHIFT_EXPR
, type
, arg1
,
3823 TREE_OPERAND (arg0
, 1)));
3827 /* x*0 is 0, except for IEEE floating point. */
3828 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3830 && real_zerop (arg1
))
3831 return omit_one_operand (type
, arg1
, arg0
);
3832 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3833 However, ANSI says we can drop signals,
3834 so we can do this anyway. */
3835 if (real_onep (arg1
))
3836 return non_lvalue (convert (type
, arg0
));
3838 if (! wins
&& real_twop (arg1
))
3840 tree arg
= save_expr (arg0
);
3841 return build (PLUS_EXPR
, type
, arg
, arg
);
3848 if (integer_all_onesp (arg1
))
3849 return omit_one_operand (type
, arg1
, arg0
);
3850 if (integer_zerop (arg1
))
3851 return non_lvalue (convert (type
, arg0
));
3852 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
3853 if (t1
!= NULL_TREE
)
3856 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3857 is a rotate of A by C1 bits. */
3859 if ((TREE_CODE (arg0
) == RSHIFT_EXPR
3860 || TREE_CODE (arg0
) == LSHIFT_EXPR
)
3861 && (TREE_CODE (arg1
) == RSHIFT_EXPR
3862 || TREE_CODE (arg1
) == LSHIFT_EXPR
)
3863 && TREE_CODE (arg0
) != TREE_CODE (arg1
)
3864 && operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
,0), 0)
3865 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
3866 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3867 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3868 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
3869 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1
, 1)) == 0
3870 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
3871 + TREE_INT_CST_LOW (TREE_OPERAND (arg1
, 1)))
3872 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
3873 return build (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3874 TREE_CODE (arg0
) == LSHIFT_EXPR
3875 ? TREE_OPERAND (arg0
, 1) : TREE_OPERAND (arg1
, 1));
3880 if (integer_zerop (arg1
))
3881 return non_lvalue (convert (type
, arg0
));
3882 if (integer_all_onesp (arg1
))
3883 return fold (build1 (BIT_NOT_EXPR
, type
, arg0
));
3888 if (integer_all_onesp (arg1
))
3889 return non_lvalue (convert (type
, arg0
));
3890 if (integer_zerop (arg1
))
3891 return omit_one_operand (type
, arg1
, arg0
);
3892 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
3893 if (t1
!= NULL_TREE
)
3895 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3896 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == NOP_EXPR
3897 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
3899 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)));
3900 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
3901 && (~TREE_INT_CST_LOW (arg0
)
3902 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
3903 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg1
, 0));
3905 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
3906 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
3908 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
3909 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
3910 && (~TREE_INT_CST_LOW (arg1
)
3911 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
3912 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg0
, 0));
3916 case BIT_ANDTC_EXPR
:
3917 if (integer_all_onesp (arg0
))
3918 return non_lvalue (convert (type
, arg1
));
3919 if (integer_zerop (arg0
))
3920 return omit_one_operand (type
, arg0
, arg1
);
3921 if (TREE_CODE (arg1
) == INTEGER_CST
)
3923 arg1
= fold (build1 (BIT_NOT_EXPR
, type
, arg1
));
3924 code
= BIT_AND_EXPR
;
3930 /* In most cases, do nothing with a divide by zero. */
3931 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3932 #ifndef REAL_INFINITY
3933 if (TREE_CODE (arg1
) == REAL_CST
&& real_zerop (arg1
))
3936 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3938 /* In IEEE floating point, x/1 is not equivalent to x for snans.
3939 However, ANSI says we can drop signals, so we can do this anyway. */
3940 if (real_onep (arg1
))
3941 return non_lvalue (convert (type
, arg0
));
3943 /* If ARG1 is a constant, we can convert this to a multiply by the
3944 reciprocal. This does not have the same rounding properties,
3945 so only do this if -ffast-math. We can actually always safely
3946 do it if ARG1 is a power of two, but it's hard to tell if it is
3947 or not in a portable manner. */
3948 if (TREE_CODE (arg1
) == REAL_CST
&& flag_fast_math
3949 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
3951 return fold (build (MULT_EXPR
, type
, arg0
, tem
));
3955 case TRUNC_DIV_EXPR
:
3956 case ROUND_DIV_EXPR
:
3957 case FLOOR_DIV_EXPR
:
3959 case EXACT_DIV_EXPR
:
3960 if (integer_onep (arg1
))
3961 return non_lvalue (convert (type
, arg0
));
3962 if (integer_zerop (arg1
))
3965 /* If we have ((a / C1) / C2) where both division are the same type, try
3966 to simplify. First see if C1 * C2 overflows or not. */
3967 if (TREE_CODE (arg0
) == code
&& TREE_CODE (arg1
) == INTEGER_CST
3968 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
3972 new_divisor
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 1), arg1
, 0);
3973 tem
= const_binop (FLOOR_DIV_EXPR
, new_divisor
, arg1
, 0);
3975 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_LOW (tem
)
3976 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_HIGH (tem
))
3978 /* If no overflow, divide by C1*C2. */
3979 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), new_divisor
));
3983 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
3984 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
3985 expressions, which often appear in the offsets or sizes of
3986 objects with a varying size. Only deal with positive divisors
3987 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
3989 Look for NOPs and SAVE_EXPRs inside. */
3991 if (TREE_CODE (arg1
) == INTEGER_CST
3992 && tree_int_cst_sgn (arg1
) >= 0)
3994 int have_save_expr
= 0;
3995 tree c2
= integer_zero_node
;
3998 if (TREE_CODE (xarg0
) == SAVE_EXPR
)
3999 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
4003 if (TREE_CODE (xarg0
) == PLUS_EXPR
4004 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
4005 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
4006 else if (TREE_CODE (xarg0
) == MINUS_EXPR
4007 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4008 /* If we are doing this computation unsigned, the negate
4010 && ! TREE_UNSIGNED (type
))
4012 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
4013 xarg0
= TREE_OPERAND (xarg0
, 0);
4016 if (TREE_CODE (xarg0
) == SAVE_EXPR
)
4017 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
4021 if (TREE_CODE (xarg0
) == MULT_EXPR
4022 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4023 && tree_int_cst_sgn (TREE_OPERAND (xarg0
, 1)) >= 0
4024 && (integer_zerop (const_binop (TRUNC_MOD_EXPR
,
4025 TREE_OPERAND (xarg0
, 1), arg1
, 1))
4026 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, arg1
,
4027 TREE_OPERAND (xarg0
, 1), 1)))
4028 && (tree_int_cst_sgn (c2
) >= 0
4029 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, c2
,
4032 tree outer_div
= integer_one_node
;
4033 tree c1
= TREE_OPERAND (xarg0
, 1);
4036 /* If C3 > C1, set them equal and do a divide by
4037 C3/C1 at the end of the operation. */
4038 if (tree_int_cst_lt (c1
, c3
))
4039 outer_div
= const_binop (code
, c3
, c1
, 0), c3
= c1
;
4041 /* The result is A * (C1/C3) + (C2/C3). */
4042 t
= fold (build (PLUS_EXPR
, type
,
4043 fold (build (MULT_EXPR
, type
,
4044 TREE_OPERAND (xarg0
, 0),
4045 const_binop (code
, c1
, c3
, 1))),
4046 const_binop (code
, c2
, c3
, 1)));
4048 if (! integer_onep (outer_div
))
4049 t
= fold (build (code
, type
, t
, convert (type
, outer_div
)));
4061 case FLOOR_MOD_EXPR
:
4062 case ROUND_MOD_EXPR
:
4063 case TRUNC_MOD_EXPR
:
4064 if (integer_onep (arg1
))
4065 return omit_one_operand (type
, integer_zero_node
, arg0
);
4066 if (integer_zerop (arg1
))
4069 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4070 where C1 % C3 == 0. Handle similarly to the division case,
4071 but don't bother with SAVE_EXPRs. */
      if (TREE_CODE (arg1) == INTEGER_CST
          && ! integer_zerop (arg1))
        {
          tree c2 = integer_zero_node;
          tree xarg0 = arg0;

          if (TREE_CODE (xarg0) == PLUS_EXPR
              && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
            c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
          else if (TREE_CODE (xarg0) == MINUS_EXPR
                   && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
                   && ! TREE_UNSIGNED (type))
            {
              c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
              xarg0 = TREE_OPERAND (xarg0, 0);
            }

          STRIP_NOPS (xarg0);

          if (TREE_CODE (xarg0) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
              && integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                             TREE_OPERAND (xarg0, 1),
                                             arg1, 1))
              && tree_int_cst_sgn (c2) >= 0)
            /* The result is (C2 % C3).  */
            return omit_one_operand (type, const_binop (code, c2, arg1, 1),
                                     TREE_OPERAND (xarg0, 0));
        }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (convert (type, arg0));
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (tree_int_cst_sgn (arg1) < 0)
        return t;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return arg0;
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return arg0;
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      return invert_truthvalue (arg0);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return arg0;
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (arg1);
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return non_lvalue (arg0);
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        break;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a01,
                                fold (build (code, type, a00, a11))));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build (TREE_CODE (arg0), type,
                                fold (build (code, type, a00, a10)),
                                a01));
        }
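      /* For example, (a || b) && (a || c) matches the first arm above
         (a00 == a10) and is rebuilt as a || (b && c), so A is tested
         only once.  */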
      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return arg0;
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (arg1);
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
        return non_lvalue (arg0);
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
        return non_lvalue (arg1);
      if (integer_zerop (arg1))
        return non_lvalue (arg0);
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
        return non_lvalue (invert_truthvalue (arg1));
      if (integer_onep (arg1))
        return non_lvalue (invert_truthvalue (arg0));
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a constant integer, put it last.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) != INTEGER_CST)
        {
          TREE_OPERAND (t, 0) = arg1;
          TREE_OPERAND (t, 1) = arg0;
          arg0 = TREE_OPERAND (t, 0);
          arg1 = TREE_OPERAND (t, 1);
          code = swap_tree_comparison (code);
          TREE_SET_CODE (t, code);
        }
      /* Convert foo++ == CONST into ++foo == CONST + INCR.
         First, see if one arg is constant; find the constant arg
         and the other one.  */
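      /* For example, foo++ == 5 on an int becomes ++foo == 6: the increment
         happens either way, and the rewritten form needs no copy of the
         pre-increment value.  */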
      {
        tree constop = 0, varop;
        int constopnum = -1;

        if (TREE_CONSTANT (arg1))
          constopnum = 1, constop = arg1, varop = arg0;
        if (TREE_CONSTANT (arg0))
          constopnum = 0, constop = arg0, varop = arg1;
        if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
          {
            /* This optimization is invalid for ordered comparisons
               if CONST+INCR overflows or if foo+incr might overflow.
               This optimization is invalid for floating point due to rounding.
               For pointer types we assume overflow doesn't happen.  */
            if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
                || (! FLOAT_TYPE_P (TREE_TYPE (varop))
                    && (code == EQ_EXPR || code == NE_EXPR)))
              {
                tree newconst
                  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
                                 constop, TREE_OPERAND (varop, 1)));
                TREE_SET_CODE (varop, PREINCREMENT_EXPR);
                t = build (code, type, TREE_OPERAND (t, 0),
                           TREE_OPERAND (t, 1));
                TREE_OPERAND (t, constopnum) = newconst;
                return t;
              }
          }
        else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
          {
            if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
                || (! FLOAT_TYPE_P (TREE_TYPE (varop))
                    && (code == EQ_EXPR || code == NE_EXPR)))
              {
                tree newconst
                  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
                                 constop, TREE_OPERAND (varop, 1)));
                TREE_SET_CODE (varop, PREDECREMENT_EXPR);
                t = build (code, type, TREE_OPERAND (t, 0),
                           TREE_OPERAND (t, 1));
                TREE_OPERAND (t, constopnum) = newconst;
                return t;
              }
          }
      }
      /* Change X >= CST to X > (CST - 1) if CST is positive.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (TREE_CODE (t))
            {
            case GE_EXPR:
              code = GT_EXPR;
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              t = build (code, type, TREE_OPERAND (t, 0), arg1);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              t = build (code, type, TREE_OPERAND (t, 0), arg1);
              break;
            }
        }
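      /* For example, x >= 5 becomes x > 4 and x < 5 becomes x <= 4, giving
         equivalent comparisons against adjacent constants one common form.  */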
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
                                  convert (TREE_TYPE (arg0),
                                           integer_one_node)),
                           arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 0),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
                                  convert (TREE_TYPE (arg0),
                                           integer_one_node)),
                           arg1));
        }
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && ! TREE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type (TREE_TYPE (arg0));
          tree newmod = build (TREE_CODE (arg0), newtype,
                               convert (newtype, TREE_OPERAND (arg0, 0)),
                               convert (newtype, TREE_OPERAND (arg0, 1)));

          return build (code, type, newmod, convert (newtype, arg1));
        }
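      /* For example, (x % 4) == 0 with signed x becomes
         ((unsigned) x % 4u) == 0: a power-of-2 modulus is zero exactly when
         the low-order bits are zero, which does not depend on the sign.  */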
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                      arg0, integer_zero_node);
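      /* For example, (a & 4) == 4 becomes (a & 4) != 0: with a single-bit
         mask the AND result can only be 0 or 4, so the two tests agree.  */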
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TREE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                             TREE_OPERAND (arg1, 1)),
                      convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TREE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                 convert (TREE_TYPE (arg0),
                          build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
                 convert (TREE_TYPE (arg0), integer_zero_node));
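      /* For example, for unsigned x, x < (1 << y) holds exactly when x has
         no bits set at position y or above, i.e. when x >> y == 0.  */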
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
            case GE_EXPR:
            case LE_EXPR:
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
                {
                  t = build_int_2 (1, 0);
                  TREE_TYPE (t) = type;
                  return t;
                }
              code = EQ_EXPR;
              TREE_SET_CODE (t, code);
              break;

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer.  */
              if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              t = build_int_2 (0, 0);
              TREE_TYPE (t) = type;
              return t;
            }
        }
      /* An unsigned comparison against 0 can be simplified.  */
      if (integer_zerop (arg1)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
              || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
          && TREE_UNSIGNED (TREE_TYPE (arg1)))
        {
          switch (TREE_CODE (t))
            {
            case GT_EXPR:
              code = NE_EXPR;
              TREE_SET_CODE (t, NE_EXPR);
              break;
            case LE_EXPR:
              code = EQ_EXPR;
              TREE_SET_CODE (t, EQ_EXPR);
              break;
            case GE_EXPR:
              return omit_one_operand (type,
                                       convert (type, integer_one_node),
                                       arg0);
            case LT_EXPR:
              return omit_one_operand (type,
                                       convert (type, integer_zero_node),
                                       arg0);
            }
        }
      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */
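      /* As a worked case, (a > b) == 0: substituting the three orderings
         gives high_result == 0, equal_result == 1, low_result == 1, a mask
         of 3 (binary 011), which selects LE_EXPR: the whole expression
         folds to a <= b.  */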
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, minval),
                               arg1));
              tree equal_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, maxval),
                               arg1));
              tree low_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, minval, cval2, maxval),
                               arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  t = build (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (t);
                  else
                    return fold (t);
                }
            }
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          return t1 ? t1 : t;
        }
      /* If this is a comparison of complex values and either or both
         sides are a COMPLEX_EXPR, it is best to split up the comparisons
         and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.  This
         may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold (build (code, type, real0, real1)),
                              fold (build (code, type, imag0, imag1))));
        }
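      /* For example, x == COMPLEX_EXPR (a, b) becomes
         realpart (x) == a && imagpart (x) == b, while != uses || so a
         mismatch in the real parts can skip the imaginary compare.  */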
      /* From here on, the only cases we handle are when the result is
         known to be a constant.

         To compute GT, swap the arguments and do LT.
         To compute GE, do LT and invert the result.
         To compute LE, swap the arguments, do LT and invert the result.
         To compute NE, do EQ and invert the result.

         Therefore, the code below must handle only EQ and LT.  */

      if (code == LE_EXPR || code == GT_EXPR)
        {
          tem = arg0, arg0 = arg1, arg1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Note that it is safe to invert for real values here because we
         will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
        {
          invert = 1;
          code = invert_tree_comparison (code);
        }
      /* Compute a result for LT or EQ if args permit;
         otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        {
          if (code == EQ_EXPR)
            t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
                               == TREE_INT_CST_LOW (arg1))
                              && (TREE_INT_CST_HIGH (arg0)
                                  == TREE_INT_CST_HIGH (arg1)),
                              0);
          else
            t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
                               ? INT_CST_LT_UNSIGNED (arg0, arg1)
                               : INT_CST_LT (arg0, arg1)),
                              0);
        }
      /* Assume a nonexplicit constant cannot equal an explicit one,
         since such code would be undefined anyway.
         Exception: on sysvr4, using #pragma weak,
         a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && !integer_zerop (arg1)
               && TREE_CONSTANT (arg0)
               && TREE_CODE (arg0) == ADDR_EXPR
               && code == EQ_EXPR)
        t1 = build_int_2 (0, 0);
      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          /* If either operand is a NaN, the result is false with two
             exceptions: First, an NE_EXPR is true on NaNs, but that case
             is already handled correctly since we will be inverting the
             result for NE_EXPR.  Second, if we had inverted a LE_EXPR
             or a GE_EXPR into a LT_EXPR, we must return true so that it
             will be inverted into false.  */

          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
              || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            t1 = build_int_2 (invert && code == LT_EXPR, 0);

          else if (code == EQ_EXPR)
            t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
                                                 TREE_REAL_CST (arg1)),
                              0);
          else
            t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
                                                TREE_REAL_CST (arg1)),
                              0);
        }

      if (t1 == NULL_TREE)
        return t;

      if (invert)
        TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      return t1;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        return pedantic_non_lvalue
          (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
      else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);
      /* If the second operand is zero, invert the comparison and swap
         the second and third operands.  Likewise if the second operand
         is constant and the third is not or if the third operand is
         equivalent to the first operand of the comparison.  */

      if (integer_zerop (arg1)
          || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
          || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
              && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                                 TREE_OPERAND (t, 2),
                                                 TREE_OPERAND (arg0, 1))))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              arg1 = TREE_OPERAND (t, 2);
            }
        }
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  IEEE floating point prevents this though,
         because A or B might be -0.0 or a NaN.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
              || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1)))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
             depending on the comparison operation.  */
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                return pedantic_non_lvalue
                  (fold (build1 (NEGATE_EXPR, type, arg1)));
              case NE_EXPR:
                return pedantic_non_lvalue (convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                return pedantic_non_lvalue
                  (fold (build1 (ABS_EXPR, type, arg1)));
              case LE_EXPR:
              case LT_EXPR:
                return pedantic_non_lvalue
                  (fold (build1 (NEGATE_EXPR, type,
                                 fold (build1 (ABS_EXPR, type, arg1)))));
              }
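          /* For example, x > 0 ? x : -x folds to abs (x), and
             x < 0 ? x : -x folds to -abs (x).  */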
          /* If this is A != 0 ? A : 0, this is simply A.  For ==, it is
             always zero.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (convert (type, integer_zero_node));
            }
          /* If this is A op B ? A : B, this is either A, B, min (A, B),
             or max (A, B), depending on the operation.  */

          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  return pedantic_non_lvalue
                    (convert (type, (fold (build (MIN_EXPR, comp_type,
                                                  comp_op0, comp_op1)))));
                case GE_EXPR:
                case GT_EXPR:
                  return pedantic_non_lvalue
                    (convert (type, fold (build (MAX_EXPR, comp_type,
                                                 comp_op0, comp_op1))));
                }
            }
          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = convert (type, TREE_OPERAND (arg0, 1));
                t = build (code, type, TREE_OPERAND (t, 0), arg1,
                           TREE_OPERAND (t, 2));
                break;

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;
              }
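          /* For example, a < 5 ? a : 4 has C1 == C2 + 1 and folds to
             min (a, 4), while a > 5 ? a : 6 has C1 == C2 - 1 and folds
             to max (a, 6).  */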
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
           || TREE_CODE (arg1) == SAVE_EXPR)
          && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
                || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
                || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              arg1 = TREE_OPERAND (t, 2);
            }
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);
      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
         operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      if (integer_zerop (arg1))
        return non_lvalue (arg1);
      return arg1;

    case COMPLEX_EXPR:
      return build_complex (arg0, arg1);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
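      /* For example, the real part of x + y for complex x and y is
         rewritten as realpart (x) + realpart (y), which the recursive
         folds can then simplify further.  */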
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;

    default:
      return t;
    } /* switch (code) */