/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
static void encode	PARAMS ((HOST_WIDE_INT *, unsigned HOST_WIDE_INT,
				 HOST_WIDE_INT));
static void decode	PARAMS ((HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
				 HOST_WIDE_INT *));
static tree negate_expr	PARAMS ((tree));
static tree split_tree	PARAMS ((tree, enum tree_code, tree *, tree *,
				 int));
static tree associate_trees	PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop	PARAMS ((enum tree_code, tree, tree, int, int));
static void const_binop_1	PARAMS ((PTR));
static tree const_binop		PARAMS ((enum tree_code, tree, tree, int));
static void fold_convert_1	PARAMS ((PTR));
static tree fold_convert	PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int truth_value_p	PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p	PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst		PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand	PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref	PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
						tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
					    HOST_WIDE_INT *,
					    enum machine_mode *, int *,
					    int *, tree *, tree *));
static int all_ones_mask_p	PARAMS ((tree, int));
static int simple_operand_p	PARAMS ((tree));
static tree range_binop		PARAMS ((enum tree_code, tree, tree, int,
					 tree, int));
static tree make_range		PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check	PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges		PARAMS ((int *, tree *, tree *, int, tree, tree,
					 int, tree, tree));
static tree fold_range_test	PARAMS ((tree));
static tree unextend		PARAMS ((tree, int, int, tree));
static tree fold_truthop	PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv	PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p	PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond		PARAMS ((tree, int));
#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

#if defined(HOST_EBCDIC)
/* bit 8 is significant in EBCDIC */
#define CHARMASK 0xff
#else
#define CHARMASK 0x7f
#endif
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
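
/* Editor's sketch (not part of the original GNU CC sources): a minimal
   demonstration of OVERFLOW_SUM_SIGN.  Adding two large positive values
   wraps to a negative sum, and the macro reports the overflow because the
   operands agree in sign while the sum differs.  */
#if 0
static int
example_overflow_sum_sign ()
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;
  HOST_WIDE_INT sum = a + b;	/* wraps to the minimum integer */

  return OVERFLOW_SUM_SIGN (a, b, sum);	/* nonzero: overflow occurred */
}
#endif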
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
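
/* Editor's sketch (not part of the original GNU CC sources), assuming a
   host with HOST_BITS_PER_WIDE_INT == 32: encode splits the two-word value
   (hi = 0x1, low = 0x89abcdef) into the four 16-bit digits
   { 0xcdef, 0x89ab, 0x0001, 0x0000 }, and decode reassembles them.  */
#if 0
static void
example_encode_decode ()
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, (unsigned HOST_WIDE_INT) 0x89abcdef, (HOST_WIDE_INT) 0x1);
  decode (words, &low, &hi);	/* low == 0x89abcdef and hi == 0x1 again */
}
#endif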
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
			 overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
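
/* Editor's sketch (not part of the original GNU CC sources), assuming the
   front end's type nodes are initialized: the 9-bit constant 0x1ff does not
   fit a signed 8-bit type; force_fit_type clears the excess bits (giving
   0xff), sees the sign bit set, sign-extends to -1, and returns 1 because
   the stored value changed.  */
#if 0
static int
example_force_fit_type ()
{
  tree t = build_int_2 (0x1ff, 0);

  TREE_TYPE (t) = signed_char_type_node;	/* 8-bit precision */
  return force_fit_type (t, 0);			/* value becomes -1; returns 1 */
}
#endif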
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
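
/* Editor's sketch (not part of the original GNU CC sources): adding the
   doubleword value with an all-ones low word to 1 carries into the high
   word; the (l < l1) test in add_double is what propagates the carry.  */
#if 0
static void
example_add_double ()
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* (low = ~0, high = 0) + (low = 1, high = 0) == (low = 0, high = 1);
     add_double returns 0 since no signed overflow occurs.  */
  add_double (~(unsigned HOST_WIDE_INT) 0, (HOST_WIDE_INT) 0,
	      (unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0, &lv, &hv);
}
#endif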
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }
}
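
/* Editor's note with sketch (not part of the original GNU CC sources):
   the double shift `>> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1' above
   computes a logical shift by HOST_BITS_PER_WIDE_INT - count without ever
   using a shift count equal to the word size, which C leaves undefined
   when COUNT is 0.  */
#if 0
static unsigned HOST_WIDE_INT
example_safe_word_shift (x, count)
     unsigned HOST_WIDE_INT x;
     int count;
{
  /* Equivalent to x >> (HOST_BITS_PER_WIDE_INT - count) for
     0 <= count < HOST_BITS_PER_WIDE_INT, including count == 0.  */
  return x >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1;
}
#endif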
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec ATTRIBUTE_UNUSED;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = signmask;
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
	     | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
    }
  else
    {
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
	     | ((unsigned HOST_WIDE_INT) h1 >> count));
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
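
/* Editor's sketch (not part of the original GNU CC sources): a rotation
   is the OR of two opposite shifts, which is exactly how lrotate_double
   combines lshift_double and rshift_double above.  */
#if 0
static unsigned HOST_WIDE_INT
example_rotate_left (x, count, prec)
     unsigned HOST_WIDE_INT x;
     int count, prec;
{
  /* Assumes 0 < count < prec and prec == HOST_BITS_PER_WIDE_INT.  */
  return (x << count) | (x >> (prec - count));
}
#endif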
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  register int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num);	/* to zero 9th element */
  memset ((char *) den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
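
/* Editor's sketch (not part of the original GNU CC sources): how the
   rounding codes differ for -7 / 2.  TRUNC_DIV_EXPR gives -3 rem -1,
   FLOOR_DIV_EXPR gives -4 rem 1, CEIL_DIV_EXPR gives -3 rem -1, and
   ROUND_DIV_EXPR gives -4 rem 1 (since 2 * |rem| >= |den|).  */
#if 0
static void
example_div_and_round_double ()
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
			(unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
			(unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
			&lquo, &hquo, &lrem, &hrem);
  /* lquo/hquo now hold -4 and lrem/hrem hold 1.  */
}
#endif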
#ifndef REAL_ARITHMETIC
/* Effectively truncate a real value to represent the nearest possible value
   in a narrower mode.  The result is actually represented in the same data
   type as the argument, but its value is usually different.

   A trap may occur during the FP operations and it is the responsibility
   of the calling function to have a handler established.  */

REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  return REAL_VALUE_TRUNCATE (mode, arg);
}
#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } big_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } little_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && u.big_endian.mantissa1 == 0
	      && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && u.little_endian.mantissa1 == 0
	      && u.little_endian.mantissa2 == 0);
    }
}

/* Check whether an IEEE double precision number is a NaN.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } big_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } little_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && (u.big_endian.mantissa1 != 0
		  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && (u.little_endian.mantissa1 != 0
		  || u.little_endian.mantissa2 != 0));
    }
}

/* Check for a negative IEEE double precision number.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign      :  1;
      unsigned exponent  : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } big_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent  : 11;
      unsigned sign      :  1;
    } little_endian;
  } u;

  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

int
target_isinf (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

int
target_isnan (x)
     REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
{
  return 0;
}

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
#endif /* Target not IEEE */
/* Try to change R into its exact multiplicative inverse in machine mode
   MODE.  Return nonzero function value if successful.  */

int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  jmp_buf float_error;
  union
    {
      double d;
      unsigned short i[4];
    } x, t, y;
#ifdef CHECK_FLOAT_VALUE
  int i;
#endif

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
/* Convert C99 hexadecimal floating point string constant S.  Return
   real value type in mode MODE.  This function uses the host computer's
   floating point arithmetic when there is no REAL_ARITHMETIC.  */

REAL_VALUE_TYPE
real_hex_to_f (s, mode)
     char *s;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE ip;
  char *p = s;
  unsigned HOST_WIDE_INT low, high;
  int shcount, nrmcount, k;
  int sign, expsign, isfloat;
  int lost = 0;	/* Nonzero low order bits shifted out and discarded.  */
  int frexpon = 0;  /* Bits after the decimal point.  */
  int expon = 0;  /* Value of exponent.  */
  int decpt = 0;  /* How many decimal points.  */
  int gotp = 0;  /* How many P's.  */
  char c;

  isfloat = 0;
  expsign = 1;
  ip = 0.0;

  while (*p == ' ' || *p == '\t')
    ++p;

  /* Sign, if any, comes first.  */
  sign = 1;
  if (*p == '-')
    {
      sign = -1;
      ++p;
    }

  /* The string is supposed to start with 0x or 0X .  */
  if (*p == '0')
    {
      ++p;
      if (*p == 'x' || *p == 'X')
	++p;
      else
	abort ();
    }
  else
    abort ();

  while (*p == '0')
    ++p;

  high = 0;
  low = 0;
  shcount = 0;
  while ((c = *p) != '\0')
    {
      if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
	  || (c >= 'a' && c <= 'f'))
	{
	  k = c & CHARMASK;
	  if (k >= 'a' && k <= 'f')
	    k = k - 'a' + 10;
	  else if (k >= 'A')
	    k = k - 'A' + 10;
	  else
	    k = k - '0';

	  if ((high & 0xf0000000) == 0)
	    {
	      high = (high << 4) + ((low >> 28) & 15);
	      low = (low << 4) + k;
	      shcount += 4;
	      if (decpt)
		frexpon += 4;
	    }
	  else
	    {
	      /* Record nonzero lost bits.  */
	      lost |= k;
	      if (! decpt)
		frexpon -= 4;
	    }
	  ++p;
	}
      else if (c == '.')
	{
	  ++decpt;
	  ++p;
	}
      else if (c == 'p' || c == 'P')
	{
	  ++gotp;
	  ++p;
	  /* Sign of exponent.  */
	  if (*p == '-')
	    {
	      expsign = -1;
	      ++p;
	    }

	  /* Value of exponent.
	     The exponent field is a decimal integer.  */
	  while (ISDIGIT (*p))
	    {
	      k = (*p++ & CHARMASK) - '0';
	      expon = 10 * expon + k;
	    }

	  expon *= expsign;
	  /* F suffix is ambiguous in the significand part
	     so it must appear after the decimal exponent field.  */
	  if (*p == 'f' || *p == 'F')
	    {
	      isfloat = 1;
	      ++p;
	      break;
	    }
	}
      else if (c == 'l' || c == 'L')
	{
	  ++p;
	  break;
	}
      else
	break;
    }

  /* Abort if last character read was not legitimate.  */
  c = *p;
  if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
    abort ();

  /* There must be either one decimal point or one p.  */
  if (decpt == 0 && gotp == 0)
    abort ();

  shcount -= 4;
  if (high == 0 && low == 0)
    return dconst0;

  /* Normalize.  */
  nrmcount = 0;
  if (high == 0)
    {
      high = low;
      low = 0;
      nrmcount += 32;
    }

  /* Leave a high guard bit for carry-out.  */
  if ((high & 0x80000000) != 0)
    {
      lost |= low & 1;
      low = (low >> 1) | (high << 31);
      high = high >> 1;
      nrmcount -= 1;
    }

  if ((high & 0xffff8000) == 0)
    {
      high = (high << 16) + ((low >> 16) & 0xffff);
      low = low << 16;
      nrmcount += 16;
    }

  while ((high & 0xc0000000) == 0)
    {
      high = (high << 1) + ((low >> 31) & 1);
      low = low << 1;
      nrmcount += 1;
    }

  if (isfloat || GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    {
      /* Keep 24 bits precision, bits 0x7fffff80.
	 Rounding bit is 0x40.  */
      lost = lost | low | (high & 0x3f);
      low = 0;
      if (high & 0x40)
	{
	  if ((high & 0x80) || lost)
	    high += 0x40;
	}
      high &= 0xffffff80;
    }
  else
    {
      /* We need real.c to do long double formats, so here default
	 to double precision.  */
#if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      /* IEEE double.
	 Keep 53 bits precision, bits 0x7fffffff fffffc00.
	 Rounding bit is low word 0x200.  */
      lost = lost | (low & 0x1ff);
      if (low & 0x200)
	{
	  if ((low & 0x400) || lost)
	    low = (low + 0x200) & 0xfffffc00;
	}
      low &= 0xfffffc00;
#else
      /* Assume it's a VAX with 56-bit significand,
	 bits 0x7fffffff ffffff80.  */
      lost = lost | (low & 0x7f);
      if (low & 0x40)
	{
	  if ((low & 0x80) || lost)
	    low = (low + 0x40) & 0xffffff80;
	}
      low &= 0xffffff80;
#endif
    }

  ip = (double) high;
  ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
  /* Apply shifts and exponent value as power of 2.  */
  ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));

  if (sign < 0)
    ip = -ip;
  return ip;
}
#endif /* no REAL_ARITHMETIC */
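
/* Editor's note with sketch (not part of the original GNU CC sources):
   in C99 hexadecimal notation the significand is hexadecimal and the
   `p' exponent is a decimal power of two, so "0x1.8p3" denotes
   1.5 * 2^3 == 12.0.  */
#if 0
static REAL_VALUE_TYPE
example_real_hex_to_f ()
{
  return real_hex_to_f ("0x1.8p3", DFmode);	/* exactly 12.0 */
}
#endif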
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
	  && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
	  && ! TREE_OVERFLOW (tem))
	return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_fast_math)
	return convert (type,
			fold (build (MINUS_EXPR, TREE_TYPE (t),
				     TREE_OPERAND (t, 1),
				     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  If NEGATE_P is true, we
   are negating all of IN.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (in, code, conp, litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness. */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CONSTANT (in))
    *conp = in;

  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant. */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any. */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p) *litp = negate_expr (*litp);
      if (neg_conp_p) *conp = negate_expr (*conp);
      if (neg_var_p) var = negate_expr (var);
    }
  else
    var = in;

  if (negate_p)
    {
      var = negate_expr (var);
      *conp = negate_expr (*conp);
      *litp = negate_expr (*litp);
    }

  return var;
}
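
/* Editor's sketch (not part of the original GNU CC sources): splitting
   the tree for `v + 3' with CODE == PLUS_EXPR stores the INTEGER_CST 3
   through LITP, leaves *CONP null, and returns `v'; associate_trees below
   can then rebuild the sum from the parts.  */
#if 0
static tree
example_split_tree (sum, type)
     tree sum;			/* hypothetical tree for `v + 3' */
     tree type;
{
  tree con, lit, var;

  var = split_tree (sum, PLUS_EXPR, &con, &lit, 0);
  return associate_trees (var, lit, PLUS_EXPR, type);
}
#endif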
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE, except if CODE is a
   MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
   have taken care of the negations.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  if (code == MINUS_EXPR)
    code = PLUS_EXPR;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (TREE_CODE (t1) == NEGATE_EXPR)
	return build (MINUS_EXPR, type, convert (type, t2),
		      convert (type, TREE_OPERAND (t1, 0)));
      else if (TREE_CODE (t2) == NEGATE_EXPR)
	return build (MINUS_EXPR, type, convert (type, t1),
		      convert (type, TREE_OPERAND (t2, 0)));
      else
	return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
   If FORSIZE is nonzero, compute overflow for unsigned types.  */

static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = - int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  if (forsize && hi == 0 && low < 10000
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, TREE_TYPE (arg1));
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
	: force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Define input and output argument for const_binop_1.  */
struct cb_args
{
  enum tree_code code;		/* Input: tree code for operation.  */
  tree type;			/* Input: tree type for operation.  */
  REAL_VALUE_TYPE d1, d2;	/* Input: floating point operands.  */
  tree t;			/* Output: constant for result.  */
};

/* Do the real arithmetic for const_binop while protected by a
   float overflow handler.  */

static void
const_binop_1 (data)
     PTR data;
{
  struct cb_args *args = (struct cb_args *) data;
  REAL_VALUE_TYPE value;

#ifdef REAL_ARITHMETIC
  REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
#else
  switch (args->code)
    {
    case PLUS_EXPR:
      value = args->d1 + args->d2;
      break;

    case MINUS_EXPR:
      value = args->d1 - args->d2;
      break;

    case MULT_EXPR:
      value = args->d1 * args->d2;
      break;

    case RDIV_EXPR:
#ifndef REAL_INFINITY
      if (args->d2 == 0)
	abort ();
#endif

      value = args->d1 / args->d2;
      break;

    case MIN_EXPR:
      value = MIN (args->d1, args->d2);
      break;

    case MAX_EXPR:
      value = MAX (args->d1, args->d2);
      break;

    default:
      abort ();
    }
#endif /* no REAL_ARITHMETIC */

  args->t
    = build_real (args->type,
		  real_value_truncate (TYPE_MODE (args->type), value));
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      tree t;
      struct cb_args args;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      /* Setup input for const_binop_1() */
      args.type = TREE_TYPE (arg1);
      args.d1 = d1;
      args.d2 = d2;
      args.code = code;

      if (do_float_handler (const_binop_1, (PTR) &args))
	/* Receive output from const_binop_1.  */
	t = args.t;
      else
	{
	  /* We got an exception from const_binop_1.  */
	  t = copy_node (arg1);
	  overflow = 1;
	}

      TREE_OVERFLOW (t)
	= (force_fit_type (t, overflow)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
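
/* Editor's note (worked example, not part of the original GNU CC
   sources): the MULT_EXPR and RDIV_EXPR cases above implement

     (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i
     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   so, for instance, (1 + 2i) / (3 + 4i) = (11 + 2i) / 25.  */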
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
/* Likewise, but the desired type is specified explicitly.  */

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2048 + 1];
  static int init_p = 0;
  tree t;

  if (! init_p)
    {
      ggc_add_tree_root ((tree *) size_table,
			 sizeof size_table / sizeof (tree));
      init_p = 1;
    }

  /* If this is a positive number that fits in the table we use to hold
     cached entries, see if it is already in the table and put it there
     if not.  */
  if (number >= 0 && number < (int) ARRAY_SIZE (size_table))
    {
      if (size_table[number] != 0)
	for (t = size_table[number]; t != 0; t = TREE_CHAIN (t))
	  if (TREE_TYPE (t) == type)
	    return t;

      t = build_int_2 (number, 0);
      TREE_TYPE (t) = type;
      TREE_CHAIN (t) = size_table[number];
      size_table[number] = t;

      return t;
    }

  t = build_int_2 (number, number < 0 ? -1 : 0);
  TREE_TYPE (t) = type;
  TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
  return t;
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same type integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0, 1);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
		       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
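
/* Editor's sketch (not part of the original GNU CC sources): for the
   unsigned sizetype constants 2 and 5, size_diffop computes 5 - 2 = 3 in
   the unsigned type and negates after converting, returning -3 in
   ssizetype instead of the huge wrapped value 2 - 5 would produce.  */
#if 0
static tree
example_size_diffop ()
{
  return size_diffop (size_int (2), size_int (5));	/* -3 in ssizetype */
}
#endif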
/* This structure is used to communicate arguments to fold_convert_1.  */
struct fc_args
{
  tree arg1;			/* Input: value to convert.  */
  tree type;			/* Input: type to convert value to.  */
  tree t;			/* Output: result of conversion.  */
};

/* Function to convert floating-point constants, protected by floating
   point exception handler.  */

static void
fold_convert_1 (data)
     PTR data;
{
  struct fc_args *args = (struct fc_args *) data;

  args->t = build_real (args->type,
			real_value_truncate (TYPE_MODE (args->type),
					     TREE_REAL_CST (args->arg1)));
}
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
	  l--;
	  if (!no_upper_bound)
	    u++;
#endif
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

#ifndef REAL_ARITHMETIC
	  {
	    HOST_WIDE_INT low, high;
	    HOST_WIDE_INT half_word
	      = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

	    if (x < 0)
	      x = -x;

	    high = (HOST_WIDE_INT) (x / half_word / half_word);
	    x -= (REAL_VALUE_TYPE) high * half_word * half_word;
	    if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
	      {
		low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
		low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
	      }
	    else
	      low = (HOST_WIDE_INT) x;
	    if (TREE_REAL_CST (arg1) < 0)
	      neg_double (low, high, &low, &high);
	    t = build_int_2 (low, high);
	  }
#else
	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
#endif
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  struct fc_args args;

	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      t = arg1;
	      TREE_TYPE (arg1) = type;
	      return t;
	    }

	  /* Setup input for fold_convert_1() */
	  args.arg1 = arg1;
	  args.type = type;

	  if (do_float_handler (fold_convert_1, (PTR) &args))
	    {
	      /* Receive output from fold_convert_1() */
	      t = args.t;
	    }
	  else
	    {
	      /* We got an exception from fold_convert_1() */
	      t = copy_node (arg1);
	      overflow = 1;
	    }

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0),
				    TREE_IMAGPART (arg1), only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      if (TREE_CODE (arg0) == RTL_EXPR)
	return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}
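
/* Editor's sketch (not part of the original GNU CC sources): because
   REAL_CSTs are compared with REAL_VALUES_IDENTICAL rather than
   floating-point `==', the constants 0.0 and -0.0 are not equal here even
   though 0.0 == -0.0 as values, and two identical NaN constants are equal
   here even though NaN != NaN.  */
#if 0
static int
example_operand_equal_p (zero, minus_zero)
     tree zero, minus_zero;	/* hypothetical REAL_CSTs for 0.0, -0.0 */
{
  return operand_equal_p (zero, minus_zero, 1);	/* yields 0 */
}
#endif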
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
						   TREE_TYPE (primarg1)),
			  primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are non-zero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1)));
        default:
          break;
        }
      /* fall through - ??? */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */
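
/* E.g., when fold rewrites the hypothetical "f () * 0" to zero, the
   call still has a side effect, so the result here would be the
   COMPOUND_EXPR "(f (), 0)" rather than a bare constant.  */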
static tree
omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */
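
/* For instance, inverting "a && b" (TRUTH_ANDIF_EXPR) yields
   "!a || !b" by De Morgan's law: the code below builds a
   TRUTH_ORIF_EXPR whose operands are the recursively inverted
   operands of the original expression.  */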
tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
          && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        return build (invert_tree_comparison (code), type,
                      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build (TRUTH_XOR_EXPR, type,
                      invert_truthvalue (TREE_OPERAND (arg, 0)),
                      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)),
                    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */
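
/* A worked instance of the transformation above, with hypothetical
   constants B == 0x0c and C == 0x0a:

        (A | 0x0c) & (A | 0x0a)
     => A | (0x0c & 0x0a)
     => A | 0x08

   so the two ORs and the AND collapse into a single OR with a folded
   constant.  */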
static tree
distribute_bit_expr (code, type, arg0, arg1)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
                      fold (build (code, type, left, right))));
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is non-zero.  */

static tree
make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
     tree inner;
     tree type;
     int bitsize, bitpos;
     int unsignedp;
{
  tree result = build (BIT_FIELD_REF, type, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
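
/* As an illustration, given the hypothetical declaration

        struct s { unsigned a : 3; unsigned b : 5; } x;

   the test "x.a == 3" need not extract the 3-bit field: the containing
   byte can be loaded once, masked with a constant covering the bits of
   A, and compared against the constant 3 shifted into the field's
   position, avoiding the shift a bitfield extraction would imply.  */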
static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  unsigned int alignment;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, &alignment);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, &alignment);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (nmode, 0);
  unsigned_type = type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (linner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask),
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (rinner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bitfield",
                   code == NE_EXPR);
          return convert (compare_type,
                          (code == NE_EXPR
                           ? integer_one_node : integer_zero_node));
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bitfield",
                   code == NE_EXPR);
          return convert (compare_type,
                          (code == NE_EXPR
                           ? integer_one_node : integer_zero_node));
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build (code, compare_type,
                build (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
                        pvolatilep, pmask, pand_mask)
     tree exp;
     HOST_WIDE_INT *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;
  unsigned int alignment;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, &alignment);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
                        convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return non-zero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (mask, size)
     tree mask;
     int size;
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = signed_type (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (exp)
     tree exp;
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
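
/* To see why the example above is safe, take the range + [2, 5] for a
   signed X.  Subtracting the low bound gives X - 2 in [0, 3]; viewed
   as an unsigned number, every value of X below 2 wraps around to a
   huge positive value, so the single unsigned test

        (unsigned) (X - 2) <= 3

   is false exactly when X < 2 or X > 5, matching the comparisons it
   replaces.  */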
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */
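
/* For example, a missing lower bound gets SGN == -1 below while any
   present constant gets SGN == 0, so LT_EXPR between them yields 1:
   the omitted bound compares below every representable value without
   needing a special case.  */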
static tree
range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
     int upper0_p, upper1_p;
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return convert (type, result ? integer_one_node : integer_zero_node);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */
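
/* As a short example, "X > 5" starts out as "- [0, 0]" (X not equal to
   zero); the GT_EXPR case below rewrites it to the range "- [-, 5]" on
   X itself, i.e. X lies outside [lowest, 5], which is exactly
   X > 5.  */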
static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == '<'
              || TREE_CODE_CLASS (code) == '1'
              || TREE_CODE_CLASS (code) == '2')
            type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == '2'
              || TREE_CODE_CLASS (code) == '<'
              || (TREE_CODE_CLASS (code) == 'e'
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
         lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
        orig_type = type;

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              abort ();
            }

          exp = arg0;

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  */
          if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
                                  1, convert (type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we
                 have a low bound, reverse the range so
                 it goes from zero to the low bound minus 1.  */
              if (high == 0 && low)
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = convert (type, integer_zero_node);
                }
            }
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, type,
                               convert (type, integer_zero_node), 0, high, 1);
          n_high = range_binop (MINUS_EXPR, type,
                                convert (type, integer_zero_node), 0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build (MINUS_EXPR, type, negate_expr (arg0),
                       convert (type, integer_one_node));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
            break;

          if (! INTEGRAL_TYPE_P (type)
              || (low != 0 && ! int_fits_type_p (low, type))
              || (high != 0 && ! int_fits_type_p (high, type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = convert (type, n_low);

          if (n_high != 0)
            n_high = convert (type, n_high);

          /* If we're converting from an unsigned to a signed type,
             we will be doing the comparison as unsigned.  The tests above
             have already verified that LOW and HIGH are both positive.

             So we have to make sure that the original unsigned value will
             be interpreted as positive.  */
          if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
            {
              tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
              tree high_positive;

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (type);

              high_positive = fold (build (RSHIFT_EXPR, type,
                                           convert (type, high_positive),
                                           convert (type, integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high,
                                      1, convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high,
                                      1, convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = TREE_OPERAND (exp, 0);
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */
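
/* A worked case: the range + [2, 5] for an integral EXP has neither
   bound zero and distinct bounds, so the last case below fires with
   HIGH - LOW == 3 and we recurse on EXP - 2 against + [0, 3]; the
   zero-low-bound cases then emit the single unsigned comparison
   (unsigned) (EXP - 2) <= 3.  */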
static tree
build_range_check (type, exp, in_p, low, high)
     tree type;
     tree exp;
     int in_p;
     tree low, high;
{
  tree etype = TREE_TYPE (exp);
  tree utype, value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  else if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  else if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  else if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  else if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  else if (TREE_UNSIGNED (etype) && integer_zerop (low))
    return build_range_check (type, exp, 1, 0, high);

  else if (integer_zerop (low))
    {
      utype = unsigned_type (etype);
      return build_range_check (type, convert (utype, exp), 1, 0,
                                convert (utype, high));
    }

  else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
           && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build (MINUS_EXPR, etype, exp, low)),
                              1, convert (etype, integer_zero_node), value);
  else
    return 0;
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */
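
/* For instance, merging the included ranges + [2, 5] and + [4, 9]
   (both IN0_P and IN1_P nonzero): the ranges overlap and neither is a
   subset of the other, so the conjunction collapses to the single
   range + [4, 5], i.e. the low of the second to the high of the
   first.  */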
static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            return 0;
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (exp)
     tree exp;
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
               || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
           && lhs != 0 && rhs != 0
           && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
               || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                      TREE_OPERAND (exp, 1));

      else if (global_bindings_p () == 0
               && ! contains_placeholder_p (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                          TREE_TYPE (exp), lhs, rhs);
        }
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
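
/* A small worked case, assuming an 8-bit mode and no MASK: for the
   3-bit field value C == 0b101 (sign bit set), the steps below isolate
   the sign bit, smear it through bits 3..7 to get 0b11111000, and XOR
   with C, yielding 0b11111101 -- exactly the pattern C would have
   after a signed extension to the full width.  */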
static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert (signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
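
/* For a concrete (hypothetical) case: with

        struct s { char a, b; } *p;

   and a target where A and B share a halfword, "p->a == 2 && p->b == 4"
   can be folded into one load of the halfword and a single comparison
   against the constant whose two byte-fields hold 2 and 4 (with the
   byte order depending on BYTES_BIG_ENDIAN).  */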
static tree
fold_truthop (code, truth_type, lhs, rhs)
     enum tree_code code;
     tree truth_type, lhs, rhs;
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);

  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    return build (code, truth_type, lhs, rhs);

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, ll_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return convert (truth_type,
                          wanted_code == NE_EXPR
                          ? integer_one_node : integer_zero_node);
        }
    }

  if (r_const)
    {
      r_const = convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, rl_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return convert (truth_type,
                          wanted_code == NE_EXPR
                          ? integer_one_node : integer_zero_node);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build (wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = convert (rntype, lhs);
                  ll_mask = convert (rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = convert (lntype, rhs);
                  lr_mask = convert (lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);

          return build (wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning ("`or' of unmatched not-equal tests is always 1");
          return convert (truth_type, integer_one_node);
        }
      else
        {
          warning ("`and' of mutually exclusive equal-tests is always 0");
          return convert (truth_type, integer_zero_node);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, lntype, result, ll_mask);

  return build (wanted_code, truth_type, result,
                const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}

/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (t)
     tree t;
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      abort ();
    }
}
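
/* For illustration (hypothetical operands): `MAX (i, 10) > 5' reaches
   the GT_EXPR case with consts_equal and consts_lt both false, so the
   second MAX arm applies and the whole comparison folds to constant
   true, with `i' retained only if it has side effects.  */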

/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */
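
/* For illustration, a rough trace of the division example above:
   with T = (X * 8) + (Y * 16), C = 4 and CODE = TRUNC_DIV_EXPR, the
   PLUS_EXPR case recurses into both operands, each MULT_EXPR operand
   reaches the constant handling where 8 and 16 are divided by 4, and
   the rebuilt tree is (X * 2) + (Y * 4).  */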

static tree
extract_muldiv (t, c, code, wide_type)
     tree t;
     tree c;
     enum tree_code code;
     tree wide_type;
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression, and is unsigned, and the type is
	 smaller than ctype, then we cannot widen the expression.  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  && TREE_UNSIGNED (TREE_TYPE (op0))
	  && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		&& TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	  && (GET_MODE_SIZE (TYPE_MODE (ctype))
	      > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
				     code == MULT_EXPR ? ctype : NULL_TREE)))
	return t1;
      break;

    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;

    case WITH_RECORD_EXPR:
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;

    case SAVE_EXPR:
      /* If this has not been evaluated and the operand has no side effects,
	 we can see if we can do something inside it and make a new one.
	 Note that this test is overly conservative since we can do this
	 if the only reason it had side effects is that it was another
	 similar SAVE_EXPR, but that isn't worth bothering with.  */
      if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
	  && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
					wide_type)))
	return save_expr (t1);
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == CEIL_MOD_EXPR)
	    code = FLOOR_MOD_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code == FLOOR_MOD_EXPR)
	    code = CEIL_MOD_EXPR;
	  else if (code != MULT_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
	  if (op1 == 0 || TREE_OVERFLOW (op1))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));
      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
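
/* Within this file, fold (below) is where these rewrites are applied:
   its MULT_EXPR, *_DIV_EXPR and *_MOD_EXPR cases hand the constant
   second operand to extract_muldiv and keep the rewritten tree only
   when a non-null result comes back.  */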

/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
   S, a SAVE_EXPR, return the expression actually being evaluated.  Note
   that we may sometimes modify the tree.  */

static tree
strip_compound_expr (t, s)
     tree t;
     tree s;
{
  enum tree_code code = TREE_CODE (t);

  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
    return TREE_OPERAND (t, 1);

  /* See if this is a COND_EXPR or a simple arithmetic operator.  We
     don't bother handling any other types.  */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
  else if (TREE_CODE_CLASS (code) == '1')
    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
	   || TREE_CODE_CLASS (code) == '2')
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}

/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

static tree
constant_boolean_node (value, type)
     int value;
     tree type;
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return truthvalue_conversion (value ? integer_one_node :
				  integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}

/* Utility function for the following routine, to see how complex a nesting of
   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */

static int
count_cond (expr, lim)
     tree expr;
     int lim;
{
  int true, false;

  if (TREE_CODE (expr) != COND_EXPR)
    return 0;
  else if (lim <= 0)
    return 0;

  true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
  false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
  return MIN (lim, 1 + true + false);
}
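
/* For illustration: `a ? b : c' counts as 1 and `a ? (b ? x : y) : c'
   as 2; fold (below) only distributes an operation over conditional
   arms when count_cond (arg0, 25) + count_cond (arg1, 25) stays at or
   below 25, which bounds the potential exponential blow-up.  */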

/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the C type of the overall expression)
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

tree
fold (expr)
     tree expr;
{
  register tree t = expr;
  tree t1 = NULL_TREE;
  tree tem;
  tree type = TREE_TYPE (expr);
  register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  register enum tree_code code = TREE_CODE (t);
  register int kind;
  int invert;
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  /* Don't try to process an RTL_EXPR since its operands aren't trees.
     Likewise for a SAVE_EXPR that's already been evaluated.  */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
    return t;

  /* Return right away if already constant.  */
  if (TREE_CONSTANT (t))
    {
      if (code == CONST_DECL)
	return DECL_INITIAL (t);
      return t;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (expr);
#endif

  kind = TREE_CODE_CLASS (code);
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
	STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
	  && TREE_CODE (subop) != REAL_CST
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
	  )
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
    {
      register int len = TREE_CODE_LENGTH (code);
      register int i;
      for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */

	  if (kind == '<' || code == RSHIFT_EXPR)
	    {
	      /* Signedness matters here.  Perhaps we can refine this
		 later.  */
	      STRIP_SIGN_NOPS (op);
	    }
	  else
	    /* Strip any conversions that don't change the mode.  */
	    STRIP_NOPS (op);

	  if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;

	  if (TREE_CODE (subop) != INTEGER_CST
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
	      && TREE_CODE (subop) != REAL_CST
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
	      )
	    /* Note that TREE_CONSTANT isn't enough:
	       static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
       || code == BIT_AND_EXPR)
      && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
    {
      tem = arg0; arg0 = arg1; arg1 = tem;

      tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
      TREE_OPERAND (t, 1) = tem;
    }

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
		       : TRUTH_XOR_EXPR,
		       type, arg0, arg1));

      if (code == EQ_EXPR)
	t = invert_truthvalue (t);

      return t;
    }

  if (TREE_CODE_CLASS (code) == '1')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			   fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
			   fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (t) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
		    && (INTEGRAL_TYPE_P
			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
	    t = build1 (code, type,
			build (COND_EXPR,
			       TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (t, 1), 0)),
			       TREE_OPERAND (t, 0),
			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
	  return t;
	}
      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
	return fold (build (COND_EXPR, type, arg0,
			    fold (build1 (code, type, integer_one_node)),
			    fold (build1 (code, type, integer_zero_node))));
    }
  else if (TREE_CODE_CLASS (code) == '2'
	   || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg1) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		      fold (build (code, type,
				   arg0, TREE_OPERAND (arg1, 1))));
      else if ((TREE_CODE (arg1) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg0) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg0)
		   || (global_bindings_p () == 0
		       && ! contains_placeholder_p (arg0))))
	{
	  tree test, true_value, false_value;
	  tree lhs = 0, rhs = 0;

	  if (TREE_CODE (arg1) == COND_EXPR)
	    {
	      test = TREE_OPERAND (arg1, 0);
	      true_value = TREE_OPERAND (arg1, 1);
	      false_value = TREE_OPERAND (arg1, 2);
	    }
	  else
	    {
	      tree testtype = TREE_TYPE (arg1);
	      test = arg1;
	      true_value = convert (testtype, integer_one_node);
	      false_value = convert (testtype, integer_zero_node);
	    }

	  /* If ARG0 is complex we want to make sure we only evaluate
	     it once.  Though this is only required if it is volatile, it
	     might be more efficient even if it is not.  However, if we
	     succeed in folding one part to a constant, we do not need
	     to make this SAVE_EXPR.  Since we do this optimization
	     primarily to see if we do end up with constant and this
	     SAVE_EXPR interferes with later optimizations, suppressing
	     it when we can is important.

	     If we are not in a function, we can't make a SAVE_EXPR, so don't
	     try to do so.  Don't try to see if the result is a constant
	     if an arm is a COND_EXPR since we get exponential behavior
	     in that case.  */

	  if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
	      && global_bindings_p () == 0
	      && ((TREE_CODE (arg0) != VAR_DECL
		   && TREE_CODE (arg0) != PARM_DECL)
		  || TREE_SIDE_EFFECTS (arg0)))
	    {
	      if (TREE_CODE (true_value) != COND_EXPR)
		lhs = fold (build (code, type, arg0, true_value));

	      if (TREE_CODE (false_value) != COND_EXPR)
		rhs = fold (build (code, type, arg0, false_value));

	      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
		  && (rhs == 0 || !TREE_CONSTANT (rhs)))
		arg0 = save_expr (arg0), lhs = rhs = 0;
	    }

	  if (lhs == 0)
	    lhs = fold (build (code, type, arg0, true_value));
	  if (rhs == 0)
	    rhs = fold (build (code, type, arg0, false_value));

	  test = fold (build (COND_EXPR, type, test, lhs, rhs));

	  if (TREE_CODE (arg0) == SAVE_EXPR)
	    return build (COMPOUND_EXPR, type,
			  convert (void_type_node, arg0),
			  strip_compound_expr (test, arg0));
	  else
	    return convert (type, test);
	}

      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
      else if ((TREE_CODE (arg0) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg1) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg1)
		   || (global_bindings_p () == 0
		       && ! contains_placeholder_p (arg1))))
	{
	  tree test, true_value, false_value;
	  tree lhs = 0, rhs = 0;

	  if (TREE_CODE (arg0) == COND_EXPR)
	    {
	      test = TREE_OPERAND (arg0, 0);
	      true_value = TREE_OPERAND (arg0, 1);
	      false_value = TREE_OPERAND (arg0, 2);
	    }
	  else
	    {
	      tree testtype = TREE_TYPE (arg0);
	      test = arg0;
	      true_value = convert (testtype, integer_one_node);
	      false_value = convert (testtype, integer_zero_node);
	    }

	  if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
	      && global_bindings_p () == 0
	      && ((TREE_CODE (arg1) != VAR_DECL
		   && TREE_CODE (arg1) != PARM_DECL)
		  || TREE_SIDE_EFFECTS (arg1)))
	    {
	      if (TREE_CODE (true_value) != COND_EXPR)
		lhs = fold (build (code, type, true_value, arg1));

	      if (TREE_CODE (false_value) != COND_EXPR)
		rhs = fold (build (code, type, false_value, arg1));

	      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
		  && (rhs == 0 || !TREE_CONSTANT (rhs)))
		arg1 = save_expr (arg1), lhs = rhs = 0;
	    }

	  if (lhs == 0)
	    lhs = fold (build (code, type, true_value, arg1));
	  if (rhs == 0)
	    rhs = fold (build (code, type, false_value, arg1));

	  test = fold (build (COND_EXPR, type, test, lhs, rhs));
	  if (TREE_CODE (arg1) == SAVE_EXPR)
	    return build (COMPOUND_EXPR, type,
			  convert (void_type_node, arg1),
			  strip_compound_expr (test, arg1));
	  else
	    return convert (type, test);
	}
    }
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));

  switch (code)
    {
    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
      return t;

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Other kinds of FIX are not handled properly by fold_convert.  */

      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
	return TREE_OPERAND (t, 0);

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
	  tree final_type = TREE_TYPE (t);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (final_type);
	  int final_ptr = POINTER_TYPE_P (final_type);
	  int final_float = FLOAT_TYPE_P (final_type);
	  unsigned int final_prec = TYPE_PRECISION (final_type);
	  int final_unsignedp = TREE_UNSIGNED (final_type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	}

      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
	  TREE_USED (t) = 1;
	  return t;
	}
      if (!wins)
	{
	  TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
	  return t;
	}
      return fold_convert (t, arg0);
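
/* For illustration (hypothetical C, 32-bit int, 64-bit long long):
   (int) (long long) i, for int i, collapses to i by the
   own-type-via-wider-object rule above, while (long long) (int) us,
   for unsigned short us, becomes a single zero-extension of us by the
   sign-extension-of-zero-extension rule.  */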
#if 0  /* This loses on &"foo"[0].  */
    case ARRAY_REF:
	{
	  /* Fold an expression like: "foo"[2] */
	  if (TREE_CODE (arg0) == STRING_CST
	      && TREE_CODE (arg1) == INTEGER_CST
	      && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
	    {
	      t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg1)], 0);
	      TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
	      force_fit_type (t, 0);
	    }
	}
      return t;
#endif /* 0 */

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    t = TREE_VALUE (m);
	}
      return t;

    case RANGE_EXPR:
      TREE_CONSTANT (t) = wins;
      return t;

    case NEGATE_EXPR:
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT low;
	      HOST_WIDE_INT high;
	      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					 TREE_INT_CST_HIGH (arg0),
					 &low, &high);
	      t = build_int_2 (low, high);
	      TREE_TYPE (t) = type;
	      TREE_OVERFLOW (t)
		= (TREE_OVERFLOW (arg0)
		   | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
	      TREE_CONSTANT_OVERFLOW (t)
		= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	}
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return TREE_OPERAND (arg0, 0);

      /* Convert - (a - b) to (b - a) for non-floating-point.  */
      else if (TREE_CODE (arg0) == MINUS_EXPR
	       && (! FLOAT_TYPE_P (type) || flag_fast_math))
	return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
		      TREE_OPERAND (arg0, 0));

      return t;

    case ABS_EXPR:
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      if (! TREE_UNSIGNED (type)
		  && TREE_INT_CST_HIGH (arg0) < 0)
		{
		  unsigned HOST_WIDE_INT low;
		  HOST_WIDE_INT high;
		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					     TREE_INT_CST_HIGH (arg0),
					     &low, &high);
		  t = build_int_2 (low, high);
		  TREE_TYPE (t) = type;
		  TREE_OVERFLOW (t)
		    = (TREE_OVERFLOW (arg0)
		       | force_fit_type (t, overflow));
		  TREE_CONSTANT_OVERFLOW (t)
		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
		}
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    {
	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
		t = build_real (type,
				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	    }
	}
      else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
	return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      return t;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build (COMPLEX_EXPR, type,
		      TREE_OPERAND (arg0, 0),
		      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_OPERAND (arg0, 0),
			      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (CONJ_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (CONJ_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case BIT_NOT_EXPR:
      if (wins)
	{
	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
			   ~ TREE_INT_CST_HIGH (arg0));
	  TREE_TYPE (t) = type;
	  force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;

    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      else if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || (TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;

	      if (TREE_CODE (arg0) == PLUS_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type, parg0, marg)),
				    parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type, parg1, marg)),
				    parg0));
	    }

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build (MULT_EXPR, type, arg00,
					  build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type, alt0, alt1)),
				    same));
	    }
	}
      /* In IEEE floating point, x+0 may not equal x.  */
      else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
		|| flag_fast_math)
	       && real_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* x+(-0) equals x, even for IEEE.  */
      else if (TREE_CODE (arg1) == REAL_CST
	       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
	return non_lvalue (convert (type, arg0));

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	register enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    register tree tree01, tree11;
	    register enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
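
      /* For illustration (hypothetical C): for unsigned int x of
	 precision 32, (x << 3) + (x >> 29) matches the constant case
	 above and becomes a left-rotate of x by 3, while
	 (x << n) + (x >> (32 - n)) matches the MINUS_EXPR case and
	 becomes a left-rotate of x by n.  */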
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all.  It shouldn't matter much.  However,
	 associating multiplications is only very slightly inaccurate, so do
	 that if -ffast-math is specified.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type)
	      || (flag_fast_math && code != MULT_EXPR)))
	{
	  tree var0, con0, lit0, var1, con1, lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)))
	    {
	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      con0 = associate_trees (con0, lit0, code, type);
	      return convert (type, associate_trees (var0, con0, code, type));
	    }
	}
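
      /* For illustration: folding (x + 1) + (y + 2) splits each
	 operand into variable and literal parts and regroups them as
	 (x + y) + 3; with an address constant, (&v + 1) + 2 regroups
	 to &v + 3, a single relocatable expression.  */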
    binary:
#if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == REAL_CST)
	return t;
#endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - CST -> (-CST) - A   for floating point (what about ints ?)  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
	return
	  fold (build (MINUS_EXPR, type,
		       build_real (TREE_TYPE (arg1),
				   REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
		       TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));
	}

      else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || flag_fast_math)
	{
	  /* Except with IEEE floating point, 0-x equals -x.  */
	  if (! wins && real_zerop (arg0))
	    return negate_expr (convert (type, arg1));
	  /* Except with IEEE floating point, x-0 equals x.  */
	  if (real_zerop (arg1))
	    return non_lvalue (convert (type, arg0));
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_fast_math)
	  && operand_equal_p (arg0, arg1, 0))
	return convert (type, integer_zero_node);

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					     code, NULL_TREE)))
	    return convert (type, tem);
	}
      else
	{
	  /* x*0 is 0, except for IEEE floating point.  */
	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || flag_fast_math)
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     However, ANSI says we can drop signals,
	     so we can do this anyway.  */
	  if (real_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* x*2 is x+x */
	  if (! wins && real_twop (arg1) && global_bindings_p () == 0
	      && ! contains_placeholder_p (arg0))
	    {
	      tree arg = save_expr (arg0);
	      return build (PLUS_EXPR, type, arg, arg);
	    }
	}
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}
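
      /* For illustration: ~a | ~b becomes ~(a & b) here; targets that
	 do have a NAND instruction still get it, since combine later
	 canonicalizes back to the OR-of-NOTs form where that is
	 profitable.  */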
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
    bit_and:
      if (integer_all_onesp (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg0)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
	}
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
	}
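
      /* For illustration (hypothetical C): with unsigned char c, every
	 bit of c survives a mask of 0xff, so ((int) c) & 0xff folds to
	 (int) c; a mask like 0x7f fails the all-low-bits test above
	 and the AND is kept.  */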
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;

    case BIT_ANDTC_EXPR:
      if (integer_all_onesp (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
	  code = BIT_AND_EXPR;
	  goto bit_and;
	}
      goto binary;

    case RDIV_EXPR:
      /* In most cases, do nothing with a divide by zero.  */
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
#ifndef REAL_INFINITY
      if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
	return t;
#endif
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.
	 However, ANSI says we can drop signals, so we can do this anyway.  */
      if (real_onep (arg1))
	return non_lvalue (convert (type, arg0));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -ffast-math.  We can actually always safely
	 do it if ARG1 is a power of two, but it's hard to tell if it is
	 or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_fast_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  else if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      goto binary;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  TREE_SET_CODE (t, RROTATE_EXPR);
	  code = RROTATE_EXPR;
	  TREE_OPERAND (t, 1) = arg1
	    = const_binop
	      (MINUS_EXPR,
	       convert (TREE_TYPE (arg1),
			build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
	       arg1, 0);
	  if (tree_int_cst_sgn (arg1) < 0)
	    return t;
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_ANDTC_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);
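
      /* For illustration: in a 32-bit type, (x <rrotate> 12)
	 <rrotate> 20 rotates by 32 bits in all, i.e. not at all, so
	 the expression folds back to plain x.  */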
5928 if (operand_equal_p (arg0
, arg1
, 0))
5929 return omit_one_operand (type
, arg0
, arg1
);
5930 if (INTEGRAL_TYPE_P (type
)
5931 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), 1))
5932 return omit_one_operand (type
, arg1
, arg0
);
5936 if (operand_equal_p (arg0
, arg1
, 0))
5937 return omit_one_operand (type
, arg0
, arg1
);
5938 if (INTEGRAL_TYPE_P (type
)
5939 && TYPE_MAX_VALUE (type
)
5940 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), 1))
5941 return omit_one_operand (type
, arg1
, arg0
);
5944 case TRUTH_NOT_EXPR
:
5945 /* Note that the operand of this must be an int
5946 and its values must be 0 or 1.
5947 ("true" is a fixed value perhaps depending on the language,
5948 but we don't handle values other than 1 correctly yet.) */
5949 tem
= invert_truthvalue (arg0
);
5950 /* Avoid infinite recursion. */
5951 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
5953 return convert (type
, tem
);
5955 case TRUTH_ANDIF_EXPR
:
5956 /* Note that the operands of this must be ints
5957 and their values must be 0 or 1.
5958 ("true" is a fixed value perhaps depending on the language.) */
5959 /* If first arg is constant zero, return it. */
5960 if (integer_zerop (arg0
))
5961 return convert (type
, arg0
);
5962 case TRUTH_AND_EXPR
:
5963 /* If either arg is constant true, drop it. */
5964 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
5965 return non_lvalue (convert (type
, arg1
));
5966 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
5967 /* Preserve sequence points. */
5968 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
5969 return non_lvalue (convert (type
, arg0
));
5970 /* If second arg is constant zero, result is zero, but first arg
5971 must be evaluated. */
5972 if (integer_zerop (arg1
))
5973 return omit_one_operand (type
, arg1
, arg0
);
5974 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5975 case will be handled here. */
5976 if (integer_zerop (arg0
))
5977 return omit_one_operand (type
, arg0
, arg1
);
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a01,
                                fold (build (code, type, a00, a11))));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build (TREE_CODE (arg0), type,
                                fold (build (code, type, a00, a10)),
                                a01));
        }
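      /* For illustration, with hypothetical truth values A, B and C where
         B and C have no side effects, the distribution above gives:

             (A || B) && (A || C)  ->  A || (B && C)
             (A && B) || (A && C)  ->  A && (B || C)

         so that A is tested only once.  */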
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return convert (type, arg0);

    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
        return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg1))
        return non_lvalue (convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
        return non_lvalue (convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
        return non_lvalue (convert (type, invert_truthvalue (arg0)));
      return t;
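      /* For illustration, with a hypothetical truth value X:

             X ^ 0  ->  X
             X ^ 1  ->  !X  */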
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (-a) CMP (-b) -> b CMP a  */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold (build (code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0)));
          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
            return
              fold (build
                    (swap_tree_comparison (code), type,
                     TREE_OPERAND (arg0, 0),
                     build_real (TREE_TYPE (arg1),
                                 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
            return fold (build (code, type, arg0,
                                build_real (TREE_TYPE (arg1), dconst0)));
        }
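      /* For illustration, with hypothetical doubles A and B:

             -A < -B     ->  B < A
             -A < 10.0   ->  A > -10.0
              A == -0.0  ->  A == 0.0  */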
      /* If one arg is a constant integer, put it last.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) != INTEGER_CST)
        {
          TREE_OPERAND (t, 0) = arg1;
          TREE_OPERAND (t, 1) = arg0;
          arg0 = TREE_OPERAND (t, 0);
          arg1 = TREE_OPERAND (t, 1);
          code = swap_tree_comparison (code);
          TREE_SET_CODE (t, code);
        }
      /* Convert foo++ == CONST into ++foo == CONST + INCR.
         First, see if one arg is constant; find the constant arg
         and the other one.  */
      {
        tree constop = 0, varop = NULL_TREE;
        int constopnum = -1;

        if (TREE_CONSTANT (arg1))
          constopnum = 1, constop = arg1, varop = arg0;
        if (TREE_CONSTANT (arg0))
          constopnum = 0, constop = arg0, varop = arg1;

        if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
          {
            /* This optimization is invalid for ordered comparisons
               if CONST+INCR overflows or if foo+incr might overflow.
               This optimization is invalid for floating point due to rounding.
               For pointer types we assume overflow doesn't happen.  */
            if (POINTER_TYPE_P (TREE_TYPE (varop))
                || (! FLOAT_TYPE_P (TREE_TYPE (varop))
                    && (code == EQ_EXPR || code == NE_EXPR)))
              {
                tree newconst
                  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
                                 constop, TREE_OPERAND (varop, 1)));

                /* Do not overwrite the current varop to be a preincrement,
                   create a new node so that we won't confuse our caller who
                   might create trees and throw them away, reusing the
                   arguments that they passed to build.  This shows up in
                   the THEN or ELSE parts of ?: being postincrements.  */
                varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
                               TREE_OPERAND (varop, 0),
                               TREE_OPERAND (varop, 1));

                /* If VAROP is a reference to a bitfield, we must mask
                   the constant by the width of the field.  */
                if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
                    && DECL_BIT_FIELD (TREE_OPERAND
                                       (TREE_OPERAND (varop, 0), 1)))
                  {
                    int size
                      = TREE_INT_CST_LOW (DECL_SIZE
                                          (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)));
                    tree mask, unsigned_type;
                    unsigned int precision;
                    tree folded_compare;

                    /* First check whether the comparison would come out
                       always the same.  If we don't do that we would
                       change the meaning with the masking.  */
                    if (constopnum == 0)
                      folded_compare = fold (build (code, type, constop,
                                                    TREE_OPERAND (varop, 0)));
                    else
                      folded_compare = fold (build (code, type,
                                                    TREE_OPERAND (varop, 0),
                                                    constop));
                    if (integer_zerop (folded_compare)
                        || integer_onep (folded_compare))
                      return omit_one_operand (type, folded_compare, varop);

                    unsigned_type = type_for_size (size, 1);
                    precision = TYPE_PRECISION (unsigned_type);
                    mask = build_int_2 (~0, ~0);
                    TREE_TYPE (mask) = unsigned_type;
                    force_fit_type (mask, 0);
                    mask = const_binop (RSHIFT_EXPR, mask,
                                        size_int (precision - size), 0);
                    newconst = fold (build (BIT_AND_EXPR,
                                            TREE_TYPE (varop), newconst,
                                            convert (TREE_TYPE (varop),
                                                     mask)));
                  }

                t = build (code, type,
                           (constopnum == 0) ? newconst : varop,
                           (constopnum == 1) ? newconst : varop);
                return t;
              }
          }
        else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
          {
            if (POINTER_TYPE_P (TREE_TYPE (varop))
                || (! FLOAT_TYPE_P (TREE_TYPE (varop))
                    && (code == EQ_EXPR || code == NE_EXPR)))
              {
                tree newconst
                  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
                                 constop, TREE_OPERAND (varop, 1)));

                /* Do not overwrite the current varop to be a predecrement,
                   create a new node so that we won't confuse our caller who
                   might create trees and throw them away, reusing the
                   arguments that they passed to build.  This shows up in
                   the THEN or ELSE parts of ?: being postdecrements.  */
                varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
                               TREE_OPERAND (varop, 0),
                               TREE_OPERAND (varop, 1));

                if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
                    && DECL_BIT_FIELD (TREE_OPERAND
                                       (TREE_OPERAND (varop, 0), 1)))
                  {
                    int size
                      = TREE_INT_CST_LOW (DECL_SIZE
                                          (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)));
                    tree mask, unsigned_type;
                    unsigned int precision;
                    tree folded_compare;

                    if (constopnum == 0)
                      folded_compare = fold (build (code, type, constop,
                                                    TREE_OPERAND (varop, 0)));
                    else
                      folded_compare = fold (build (code, type,
                                                    TREE_OPERAND (varop, 0),
                                                    constop));
                    if (integer_zerop (folded_compare)
                        || integer_onep (folded_compare))
                      return omit_one_operand (type, folded_compare, varop);

                    unsigned_type = type_for_size (size, 1);
                    precision = TYPE_PRECISION (unsigned_type);
                    mask = build_int_2 (~0, ~0);
                    TREE_TYPE (mask) = unsigned_type;
                    force_fit_type (mask, 0);
                    mask = const_binop (RSHIFT_EXPR, mask,
                                        size_int (precision - size), 0);
                    newconst = fold (build (BIT_AND_EXPR,
                                            TREE_TYPE (varop), newconst,
                                            convert (TREE_TYPE (varop),
                                                     mask)));
                  }

                t = build (code, type,
                           (constopnum == 0) ? newconst : varop,
                           (constopnum == 1) ? newconst : varop);
                return t;
              }
          }
      }
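      /* For illustration, with a hypothetical int I (and ignoring
         overflow, which the checks above rule out for the cases that
         matter):

             I++ == 5  ->  ++I == 6
             I-- == 5  ->  --I == 4  */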
      /* Change X >= CST to X > (CST - 1) if CST is positive.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (TREE_CODE (t))
            {
            case GE_EXPR:
              code = GT_EXPR;
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              t = build (code, type, TREE_OPERAND (t, 0), arg1);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              t = build (code, type, TREE_OPERAND (t, 0), arg1);
              break;

            default:
              break;
            }
        }
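      /* For illustration, with a hypothetical int X:

             X >= 5  ->  X > 4
             X < 5   ->  X <= 4  */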
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));

      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && TREE_CODE (arg0) == NOP_EXPR
               && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
               && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
               && (TREE_TYPE (t1) == TREE_TYPE (tem)
                   || (TREE_CODE (t1) == INTEGER_CST
                       && int_fits_type_p (t1, TREE_TYPE (tem)))))
        return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
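      /* For illustration, with hypothetical ints X and Y (and no overflow
         in the adjusted constant):

             X + 3 == 7  ->  X == 4
             -X == 5     ->  X == -5
             X - Y == 0  ->  X == Y  */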
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (TRUTH_ANDIF_EXPR, type,
                            build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
                            build (LE_EXPR, type,
                                   TREE_OPERAND (arg0, 0), arg1)));
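      /* For illustration, with a hypothetical int X free of side effects:

             abs (X) <= 5  ->  X >= -5 && X <= 5  */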
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
                                  convert (TREE_TYPE (arg0),
                                           integer_one_node)),
                           arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 0),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
                                  convert (TREE_TYPE (arg0),
                                           integer_one_node)),
                           arg1));
        }
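      /* For illustration, with hypothetical ints FOO and BAR:

             ((1 << FOO) & BAR) != 0  ->  ((BAR >> FOO) & 1) != 0  */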
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && ! TREE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type (TREE_TYPE (arg0));
          tree newmod = build (TREE_CODE (arg0), newtype,
                               convert (newtype, TREE_OPERAND (arg0, 0)),
                               convert (newtype, TREE_OPERAND (arg0, 1)));

          return build (code, type, newmod, convert (newtype, arg1));
        }
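      /* For illustration, with a hypothetical signed int X:

             X % 4 == 0  ->  (unsigned int) X % 4U == 0

         which is equivalent for == and != because both remainders are
         zero for exactly the same values of X.  */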
      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                      arg0, integer_zero_node);
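      /* For illustration, with a hypothetical int X:

             (X & 1) != 0  ->  X & 1   (when the types agree)
             (X & 8) == 8  ->  (X & 8) != 0  */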
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TREE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                             TREE_OPERAND (arg1, 1)),
                      convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TREE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                 convert (TREE_TYPE (arg0),
                          build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
                 convert (TREE_TYPE (arg0), integer_zero_node));
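      /* For illustration, with a hypothetical unsigned int X:

             X < (1 << Y)   ->  (X >> Y) == 0
             X >= (1 << Y)  ->  (X >> Y) != 0  */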
      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
            case GE_EXPR:
            case LE_EXPR:
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
                return constant_boolean_node (1, type);
              code = EQ_EXPR;
              TREE_SET_CODE (t, code);
              break;

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer.  */
              if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);

            default:
              break;
            }
        }
      /* An unsigned comparison against 0 can be simplified.  */
      if (integer_zerop (arg1)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
              || POINTER_TYPE_P (TREE_TYPE (arg1)))
          && TREE_UNSIGNED (TREE_TYPE (arg1)))
        {
          switch (TREE_CODE (t))
            {
            case GT_EXPR:
              TREE_SET_CODE (t, NE_EXPR);
              break;
            case LE_EXPR:
              TREE_SET_CODE (t, EQ_EXPR);
              break;
            case GE_EXPR:
              return omit_one_operand (type,
                                       convert (type, integer_one_node),
                                       arg0);
            case LT_EXPR:
              return omit_one_operand (type,
                                       convert (type, integer_zero_node),
                                       arg0);
            default:
              break;
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values and an unsigned
         <= 0x7fffffff can be simplified.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            if (TREE_INT_CST_HIGH (arg1) == 0
                && (TREE_INT_CST_LOW (arg1)
                    == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
                && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
              switch (TREE_CODE (t))
                {
                case GT_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_zero_node),
                                           arg0);
                case GE_EXPR:
                  TREE_SET_CODE (t, EQ_EXPR);
                  break;

                case LE_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_one_node),
                                           arg0);
                case LT_EXPR:
                  TREE_SET_CODE (t, NE_EXPR);
                  break;

                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == -1
                     && (- TREE_INT_CST_LOW (arg1)
                         == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
                     && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
              switch (TREE_CODE (t))
                {
                case LT_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_zero_node),
                                           arg0);
                case LE_EXPR:
                  TREE_SET_CODE (t, EQ_EXPR);
                  break;

                case GE_EXPR:
                  return omit_one_operand (type,
                                           convert (type, integer_one_node),
                                           arg0);
                case GT_EXPR:
                  TREE_SET_CODE (t, NE_EXPR);
                  break;

                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && (TREE_INT_CST_LOW (arg1)
                         == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
                     && TREE_UNSIGNED (TREE_TYPE (arg1)))
              switch (TREE_CODE (t))
                {
                case LE_EXPR:
                  return fold (build (GE_EXPR, type,
                                      convert (signed_type (TREE_TYPE (arg0)),
                                               arg0),
                                      convert (signed_type (TREE_TYPE (arg1)),
                                               integer_zero_node)));
                case GT_EXPR:
                  return fold (build (LT_EXPR, type,
                                      convert (signed_type (TREE_TYPE (arg0)),
                                               arg0),
                                      convert (signed_type (TREE_TYPE (arg1)),
                                               integer_zero_node)));

                default:
                  break;
                }
          }
      }
      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, minval),
                               arg1));
              tree equal_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, maxval),
                               arg1));
              tree low_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, minval, cval2, maxval),
                               arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  t = build (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (t);
                  else
                    return fold (t);
                }
            }
        }
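      /* For illustration, with hypothetical ints X and Y, the three-way
         analysis above simplifies

             (X > Y) == 0             ->  X <= Y
             ((X > Y) - (Y > X)) > 0  ->  X > Y  */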
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          return t1 ? t1 : t;
        }
      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold (build (code, type, real0, real1)),
                              fold (build (code, type, imag0, imag1))));
        }
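      /* For illustration, with hypothetical complex operands X and Y where
         at least one side is a COMPLEX_EXPR or COMPLEX_CST:

             X == Y  ->  realpart (X) == realpart (Y)
                         && imagpart (X) == imagpart (Y)
             X != Y  ->  realpart (X) != realpart (Y)
                         || imagpart (X) != imagpart (Y)  */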
      /* From here on, the only cases we handle are when the result is
         known to be a constant.

         To compute GT, swap the arguments and do LT.
         To compute GE, do LT and invert the result.
         To compute LE, swap the arguments, do LT and invert the result.
         To compute NE, do EQ and invert the result.

         Therefore, the code below must handle only EQ and LT.  */

      if (code == LE_EXPR || code == GT_EXPR)
        {
          tem = arg0, arg0 = arg1, arg1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Note that it is safe to invert for real values here because we
         will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
        {
          invert = 1;
          code = invert_tree_comparison (code);
        }

      /* Compute a result for LT or EQ if args permit;
         otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        {
          if (code == EQ_EXPR)
            t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
          else
            t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
                               ? INT_CST_LT_UNSIGNED (arg0, arg1)
                               : INT_CST_LT (arg0, arg1)),
                              0);
        }

#if 0  /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
         since such code would be undefined anyway.
         Exception: on sysvr4, using #pragma weak,
         a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && !integer_zerop (arg1)
               && TREE_CONSTANT (arg0)
               && TREE_CODE (arg0) == ADDR_EXPR
               && code == EQ_EXPR)
        t1 = build_int_2 (0, 0);
#endif
      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          /* If either operand is a NaN, the result is false with two
             exceptions: First, an NE_EXPR is true on NaNs, but that case
             is already handled correctly since we will be inverting the
             result for NE_EXPR.  Second, if we had inverted a LE_EXPR
             or a GE_EXPR into a LT_EXPR, we must return true so that it
             will be inverted into false.  */

          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
              || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            t1 = build_int_2 (invert && code == LT_EXPR, 0);

          else if (code == EQ_EXPR)
            t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
                                                 TREE_REAL_CST (arg1)),
                              0);
          else
            t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
                                                TREE_REAL_CST (arg1)),
                              0);
        }

      if (t1 == NULL_TREE)
        return t;

      if (invert)
        TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return truthvalue_conversion (t1);
      return t1;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        return pedantic_non_lvalue
          (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
      else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If the second operand is zero, invert the comparison and swap
         the second and third operands.  Likewise if the second operand
         is constant and the third is not or if the third operand is
         equivalent to the first operand of the comparison.  */

      if (integer_zerop (arg1)
          || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
          || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
              && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                                 TREE_OPERAND (t, 2),
                                                 TREE_OPERAND (arg0, 1))))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              /* arg1 should be the first argument of the new T.  */
              arg1 = TREE_OPERAND (t, 1);
            }
        }
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  IEEE floating point prevents this though,
         because A or B might be -0.0 or a NaN.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
              || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1)))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          STRIP_NOPS (arg2);

          /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
             depending on the comparison operation.  */
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                return pedantic_non_lvalue
                  (convert (type,
                            negate_expr
                            (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
                                      arg1))));
              case NE_EXPR:
                return pedantic_non_lvalue (convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (convert (type, fold (build1 (ABS_EXPR,
                                                TREE_TYPE (arg1), arg1))));
              case LE_EXPR:
              case LT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
                return pedantic_non_lvalue
                  (negate_expr (convert (type,
                                         fold (build1 (ABS_EXPR,
                                                       TREE_TYPE (arg1),
                                                       arg1)))));
              default:
                break;
              }

          /* If this is A != 0 ? A : 0, this is simply A.  For ==, it is
             always zero.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (convert (type, integer_zero_node));
            }
          /* If this is A op B ? A : B, this is either A, B, min (A, B),
             or max (A, B), depending on the operation.  */

          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
              if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
                comp_type = type;

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  /* In C++ a ?: expression can be an lvalue, so put the
                     operand which will be used if they are equal first
                     so that we can convert this back to the
                     corresponding COND_EXPR.  */
                  return pedantic_non_lvalue
                    (convert (type, fold (build (MIN_EXPR, comp_type,
                                                 (comp_code == LE_EXPR
                                                  ? comp_op0 : comp_op1),
                                                 (comp_code == LE_EXPR
                                                  ? comp_op1 : comp_op0)))));
                case GE_EXPR:
                case GT_EXPR:
                  return pedantic_non_lvalue
                    (convert (type, fold (build (MAX_EXPR, comp_type,
                                                 (comp_code == GE_EXPR
                                                  ? comp_op0 : comp_op1),
                                                 (comp_code == GE_EXPR
                                                  ? comp_op1 : comp_op0)))));
                default:
                  break;
                }
            }
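          /* For illustration, with hypothetical ints A and B free of side
             effects:

                 A < B ? A : B   ->  min (A, B)
                 A > B ? A : B   ->  max (A, B)
                 A == B ? A : B  ->  B  */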
          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = convert (type, TREE_OPERAND (arg0, 1));
                t = build (code, type, TREE_OPERAND (t, 0), arg1,
                           TREE_OPERAND (t, 2));
                break;

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              default:
                break;
              }
        }
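      /* For illustration, with a hypothetical int A:

             A < 5 ? A : 4  ->  min (A, 4)
             A > 5 ? A : 6  ->  max (A, 6)  */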
      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
           || TREE_CODE (arg1) == SAVE_EXPR)
          && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
                || DECL_P (TREE_OPERAND (t, 2))
                || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            {
              t = build (code, type, tem,
                         TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
              /* arg1 should be the first argument of the new T.  */
              arg1 = TREE_OPERAND (t, 1);
            }
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);
      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
         operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));

      return t;
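      /* For illustration, with a hypothetical int A:

             A ? 1 : 0             ->  A   (when A already has the result type)
             (A & 2) != 0 ? 2 : 0  ->  A & 2  */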
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      if (integer_zerop (arg1))
        return build1 (NOP_EXPR, type, arg1);
      return convert (type, arg1);

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR,
                                          type, TREE_OPERAND (arg0, 1)))));
      return t;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
         appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
        return TREE_OPERAND (t, 0);

      {
        enum tree_code code0 = TREE_CODE (arg0);
        int kind0 = TREE_CODE_CLASS (code0);
        tree arg00 = TREE_OPERAND (arg0, 0);
        tree arg01;

        if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
          return fold (build1 (code0, type,
                               fold (build1 (CLEANUP_POINT_EXPR,
                                             TREE_TYPE (arg00), arg00))));

        if (kind0 == '<' || kind0 == '2'
            || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
            || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
            || code0 == TRUTH_XOR_EXPR)
          {
            arg01 = TREE_OPERAND (arg0, 1);

            if (TREE_CONSTANT (arg00)
                || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
                    && ! has_cleanups (arg00)))
              return fold (build (code0, type, arg00,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg01), arg01))));

            if (TREE_CONSTANT (arg01))
              return fold (build (code0, type,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg00), arg00)),
                                  arg01));
          }

        return t;
      }
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
        {
          tree tmp = fold_builtin (expr);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
int
multiple_of_p (type, top, bottom)
     tree type;
     tree top;
     tree bottom;
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if ((TREE_CODE (bottom) != INTEGER_CST)
          || (tree_int_cst_sgn (top) < 0)
          || (tree_int_cst_sgn (bottom) < 0))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
        && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
      else
        /* We don't know sign of `t', so be conservative and return false.  */
        return 0;
    }
}
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (r)
     rtx r;
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;