/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "coretypes.h"
#include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
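
/* Each code above is the OR of four primitive outcome bits: 1 for "less",
   2 for "equal", 4 for "greater" and 8 for "unordered".  For example,
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (3 == 1 | 2), and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD (13).
   ANDing or ORing two encodings therefore yields the encoding of the
   conjunction or disjunction of the two comparisons.  */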
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *,
                    HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum tree_code swap_tree_comparison (enum tree_code);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
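
/* Worked example (using 8-bit values for brevity; the macro really operates
   on HOST_WIDE_INT): 100 + 50 wraps to -106.  Here a and b agree in sign
   while sum differs, so ~(a ^ b) & (a ^ sum) has the sign bit set and the
   macro yields nonzero.  */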
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
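
/* For example, on a host where HOST_BITS_PER_WIDE_INT is 32, BASE is
   0x10000 and the value 0x12345678 splits into LOWPART 0x5678 and
   HIGHPART 0x1234, with 0x1234 * BASE + 0x5678 reconstructing it.  */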
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
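
/* encode and decode are exact inverses; e.g. (a sketch, assuming a 32-bit
   HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     encode (w, 0x89abcdef, 0x01234567);
       => w[] = { 0xcdef, 0x89ab, 0x4567, 0x0123 }
     decode (w, &lo, &hi);
       => lo == 0x89abcdef, hi == 0x01234567  */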
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
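
/* For instance, fitting the value 0x1ff into a signed 8-bit type first
   clears everything above bit 7 (leaving 0xff), then sign-extends the
   now-set bit 7, so the constant becomes -1; because the stored words
   changed, the function reports an overflow.  */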
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
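
/* The low words are added with unsigned wraparound; (l < l1) is the carry
   out.  E.g. adding l1 == ~(unsigned HOST_WIDE_INT) 0 and l2 == 1 wraps l
   to 0, and l < l1 then propagates the carry into the high word.  */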
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
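
/* The special case l1 == 0 catches the one signed overflow: negating the
   most negative doubleword leaves the sign bit of *hv still set, so
   (*hv & h1) < 0 reports it.  All other values negate exactly via the
   identity -x == ~x + 1, here folded into (-l1, ~h1).  */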
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
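
/* The digit loop forms the full 8-digit unsigned product.  The two
   neg_double/add_double correction steps account for signed operands:
   for each negative operand, the other operand is subtracted from the
   upper half before checking that the upper half is just the sign
   extension of the lower half.  */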
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
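
/* In both shift routines, the split idiom
   "x >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1" moves the bits that
   cross the word boundary while avoiding a shift by the full word width
   when COUNT is 0, which C leaves undefined.  */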
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
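
/* Both rotates reduce to shifts: rotating by COUNT within PREC bits is
   the OR of a shift by COUNT in one direction and a logical shift by
   PREC - COUNT in the other, e.g. a 64-bit left rotate by 8 is
   (x << 8) | (x >> 56).  */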
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return 0;
}
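
/* Rounding example: dividing -7 by 2 under the different codes gives
     TRUNC_DIV_EXPR  quo = -3, rem = -1   (toward zero)
     FLOOR_DIV_EXPR  quo = -4, rem =  1   (toward -infinity)
     CEIL_DIV_EXPR   quo = -3, rem = -1   (toward +infinity)
     ROUND_DIV_EXPR  quo = -4, rem =  1   (-3.5 rounds away from zero)
   and in each case quo * 2 + rem == -7.  */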
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
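
/* E.g. with flag_trapv set, the INTEGER_CST case rejects the most negative
   value of the type: after masking, VAL is exactly 1 << (prec - 1), whose
   negation does not fit the type and would trap.  */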
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 0)));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                  tem, TREE_OPERAND (t, 1)));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build2 (MINUS_EXPR, TREE_TYPE (t),
                                           TREE_OPERAND (t, 1),
                                           TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               TREE_OPERAND (t, 0),
                                               negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
                                               negate_expr (tem),
                                               TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
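
/* The RSHIFT_EXPR case uses the identity that, for a 32-bit int x,
   -(x >> 31) == (unsigned) x >> 31: negating the arithmetic broadcast of
   the sign bit equals extracting the sign bit logically (0 or 1).  */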
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
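
/* Example: splitting IN = a - 4 with CODE == PLUS_EXPR returns a as the
   variable part, leaves *CONP and *LITP null, and stores 4 in *MINUS_LITP,
   so the pieces recombine as a + (-4).  */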
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
                       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
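
/* E.g. the CEIL_DIV_EXPR shortcut computes ceil (7 / 2) as
   (7 + 2 - 1) / 2 == 4: adding DIVISOR - 1 before a truncating division
   rounds a nonnegative quotient upward.  */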
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
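
/* The COMPLEX_CST cases implement the textbook identities
     (a + bi) * (c + di) == (ac - bd) + (ad + bc)i
     (a + bi) / (c + di) == ((ac + bd) + (bc - ad)i) / (cc + dd)
   with MAGSQUARED holding cc + dd.  */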
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, likewise an INTEGER_CST node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = t;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
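
/* NEW_CONST acts as a reusable scratch node: it is overwritten on every
   call, and only on a hash-table miss is it inserted and replaced by a
   freshly allocated node, so each distinct (value, type, overflow) triple
   is built exactly once.  */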
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TYPE_UNSIGNED (type)
                                     < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          int overflow = 0;
          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            case FIX_ROUND_EXPR:
              real_round (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
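
/* Under these rules, e.g. (int) NaN folds to 0 with overflow noted, and
   (int) 1e30 saturates to TYPE_MAX_VALUE (INT_MAX for int) rather than
   producing an unspecified wrapped value.  */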
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build2 (COMPLEX_EXPR, type,
                       fold_convert (TREE_TYPE (type), arg),
                       fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case ARRAY_RANGE_REF:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return x;
    }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      abort ();
    }
}
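
/* E.g. the inverse of LT_EXPR when NaNs are honored is UNGE_EXPR:
   !(x < y) also holds when x and y are unordered, so plain GE_EXPR would
   be wrong for NaN operands.  With trapping math we refuse instead, since
   the quiet inverse would lose the invalid-operand trap of the original
   comparison.  */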
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      abort ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      abort ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
                         truth_type, ll_arg, lr_arg));
}
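
/* Worked example (for a mode without NaNs, so HONOR_NANS is false):
   for "x < y && x == y" we get COMPCODE_LT & COMPCODE_EQ
   == COMPCODE_FALSE, and the whole expression folds to constant false;
   for "x < y || x == y" we get COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE,
   i.e. "x <= y".  */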
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc. must hold the
   same value in each operand/subexpression.  Hence leaving the flag
   unset assumes isochronic (or instantaneous) tree equivalence.
   If comparing arbitrary expression trees, such as from different
   statements, OEP_ONLY_CONST must usually be set.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), flags);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              flags))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), flags);

        case COMPONENT_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), flags));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), flags));
        default:
          return 0;
        }

    case 'e':
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), flags);

        case RTL_EXPR:
          return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg1, 0), flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
                                     flags))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }
}
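
/* For example, "a + b" and "b + a" (side-effect free) are equal here:
   the '2' case first tries the operands pairwise in order and, failing
   that, succeeds through the commutative_tree_code path.  Note also
   that two REAL_CSTs are compared with REAL_VALUES_IDENTICAL rather
   than C's ==, so -0.0 is not equal to 0.0 here and two identical NaNs
   are equal.  */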
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
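
/* For instance, for ARG == "(a < b) && (a == b)" (with A and B free of
   side effects) this returns 1 and leaves *CVAL1 == a and *CVAL2 == b;
   introducing a third variable, as in "(a < b) && (a == c)", makes one
   of the operand tests above fail and the function returns 0.  */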
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build2 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1),
                           eval_subst (TREE_OPERAND (arg, 1),
                                       old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build3 (code, type,
                               eval_subst (TREE_OPERAND (arg, 0),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 1),
                                           old0, new0, old1, new1),
                               eval_subst (TREE_OPERAND (arg, 2),
                                           old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build2 (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
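
/* For example, eval_subst on "(a < b) || (a == b)" with OLD0 == a,
   NEW0 == x, OLD1 == b, NEW1 == y rebuilds and refolds the tree as
   "(x < y) || (x == y)"; fold uses this together with
   twoval_comparison_p to re-evaluate such an expression with fresh
   operands.  */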
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}
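
/* For example, when folding "f () * 0" the call is the omitted operand:
   because it has side effects the result is the COMPOUND_EXPR
   "(f (), 0)", so F is still evaluated; for a plain variable "v * 0"
   the result is simply "0" wrapped as a non-lvalue.  */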
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        {
          code = invert_tree_comparison (code,
                                         HONOR_NANS (TYPE_MODE (op_type)));
          if (code == ERROR_MARK)
            return build1 (TRUTH_NOT_EXPR, type, arg);
          else
            return build2 (code, type,
                           TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
        }
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build2 (TRUTH_OR_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build2 (TRUTH_AND_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2 (TRUTH_XOR_EXPR, type,
                       invert_truthvalue (TREE_OPERAND (arg, 0)),
                       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build2 (TRUTH_ORIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)),
                     invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        break;

      /* ... fall through ...  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build2 (EQ_EXPR, type, arg,
                     fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
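
/* For example, invert_truthvalue applied to "a && b" produces
   "!a || !b" through the TRUTH_AND_EXPR case above (De Morgan), and
   applied to "x < y" on integral operands it simply produces "x >= y"
   via invert_tree_comparison.  */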
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build2 (TREE_CODE (arg0), type, common,
                       fold (build2 (code, type, left, right))));
}
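
/* For example, "(x | 12) & (x | 10)" matches with COMMON == x,
   LEFT == 12 and RIGHT == 10, and becomes "x | (12 & 10)", which the
   inner fold reduces to "x | 8".  */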
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result = build3 (BIT_FIELD_REF, type, inner,
                        size_int (bitsize), bitsize_int (bitpos));

  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (linner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask),
                   build2 (BIT_AND_EXPR, unsigned_type,
                           make_bit_field_ref (rinner, unsigned_type,
                                               nbitsize, nbitpos, 1),
                           mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build2 (code, compare_type,
                 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                 rhs);
}
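
/* Illustrative case (the exact mask and shift depend on the target's
   endianness and alignment): for "struct { unsigned a : 4, b : 4; } s",
   a test like "s.b == 3" can be done by loading the byte containing
   both fields, ANDing it with the mask selecting B's four bits, and
   comparing against the constant 3 shifted into B's position, avoiding
   the shift a plain bit-field extraction would need.  */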
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
                         fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
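
/* For example, if EXP has a 32-bit integral type, sign_bit_p returns
   EXP when VAL is 1 << 31 (0x80000000 viewed as unsigned); if EXP is a
   NOP_EXPR widening a 16-bit value, VAL is also tested against the
   narrower type's sign bit, 1 << 15, through the recursive call.  */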
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                          arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return constant_boolean_node (result, type);
}
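
/* For example, comparing a missing lower bound (SGN == -1) against the
   constant 10 (SGN == 0) with LT_EXPR yields true, since an absent
   lower bound behaves like a value below every representable number;
   two missing upper bounds compare EQ_EXPR-equal because both get
   SGN == 1.  */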
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (first_rtl_op (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == '<'
              || TREE_CODE_CLASS (code) == '1'
              || TREE_CODE_CLASS (code) == '2')
            type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == '2'
              || TREE_CODE_CLASS (code) == '<'
              || (TREE_CODE_CLASS (code) == 'e'
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
         lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
        orig_type = type;

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              abort ();
            }

          exp = arg0;

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  */
          if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
                                  1, fold_convert (type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (type, integer_zero_node);
                }
            }
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, type,
                               fold_convert (type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, type,
                                fold_convert (type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
                        fold_convert (type, integer_one_node));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
          if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
            break;

          if (! INTEGRAL_TYPE_P (type)
              || (low != 0 && ! int_fits_type_p (low, type))
              || (high != 0 && ! int_fits_type_p (high, type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (type, n_low);

          if (n_high != 0)
            n_high = fold_convert (type, n_high);

          /* If we're converting from an unsigned to a signed type,
             we will be doing the comparison as unsigned.  The tests above
             have already verified that LOW and HIGH are both positive.

             So we have to make sure that the original unsigned value will
             be interpreted as positive.  */
          if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
            {
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (type), 1);
              tree high_positive;

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (type);

              if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
                high_positive = fold (build2 (RSHIFT_EXPR, type,
                                              fold_convert (type,
                                                            high_positive),
                                              fold_convert (type,
                                                            integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = TREE_OPERAND (exp, 0);
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
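
/* For example, make_range on "x > 10" takes the GT_EXPR case above and
   produces the range "- [-, 10]" (*PIN_P == 0, *PLOW == 0,
   *PHIGH == 10), i.e. being outside the range from the lowest value
   through 10, and returns X as the expression actually being tested.  */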
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build2 (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build2 (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build2 (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build2 (GT_EXPR, type, exp,
                               fold_convert (etype, integer_zero_node)));
        }
    }

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build2 (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              fold_convert (etype, value));

  return 0;
}
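
/* For example, testing a signed int X against the range [2, 5] with
   IN_P nonzero takes the final branch above: HIGH - LOW == 3 does not
   overflow, so the check is rebuilt as "(unsigned int) (X - 2) <= 3",
   the classical unsigned-subtraction range test.  */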
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            return 0;
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
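
/* Worked example: merging "+ [2, 5]" with "+ [4, 9]" (both IN0_P and
   IN1_P nonzero): the ranges overlap and neither is a subset of the
   other, so the result is "+ [4, 5]", i.e. from the start of the second
   range through the end of the first.  */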
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
               || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
               || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                       TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                       TREE_OPERAND (exp, 1));

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           TREE_TYPE (exp), lhs, rhs);
        }
    }

  return 0;
}
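
/* For example, "2 <= x && x <= 5" yields the two ranges "+ [2, -]" and
   "+ [-, 5]", which merge_ranges combines into "+ [2, 5]";
   build_range_check then produces "(unsigned) (x - 2) <= 3".  */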
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
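
/* For example, with P == 4 in an 8-bit mode and MASK == 0: for
   C == 0b0101 the 4-bit sign bit is clear, TEMP ends up 0 and C is
   returned unchanged; for C == 0b1010 TEMP becomes 0b11110000 and the
   result is C ^ 0b11110000 == 0b11111010, i.e. C with the upper bits
   holding its 4-bit sign extension.  */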
4107 /* Find ways of folding logical expressions of LHS and RHS:
4108 Try to merge two comparisons to the same innermost item.
4109 Look for range tests like "ch >= '0' && ch <= '9'".
4110 Look for combinations of simple terms on machines with expensive branches
4111 and evaluate the RHS unconditionally.
4113 For example, if we have p->a == 2 && p->b == 4 and we can make an
4114 object large enough to span both A and B, we can do this with a comparison
4115 against the object ANDed with the a mask.
4117 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4118 operations to do this with one comparison.
4120 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4121 function and the one above.
4123 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4124 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4126 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4129 We return the simplified tree or 0 if no optimization is possible. */
4132 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
4134 /* If this is the "or" of two comparisons, we can do something if
4135 the comparisons are NE_EXPR. If this is the "and", we can do something
4136 if the comparisons are EQ_EXPR. I.e.,
4137 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4139 WANTED_CODE is this operation code. For single bit fields, we can
4140 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4141 comparison for one-bit fields. */
4143 enum tree_code wanted_code
;
4144 enum tree_code lcode
, rcode
;
4145 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
4146 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
4147 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
4148 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
4149 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
4150 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
4151 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
4152 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
4153 enum machine_mode lnmode
, rnmode
;
4154 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
4155 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
4156 tree l_const
, r_const
;
4157 tree lntype
, rntype
, result
;
4158 int first_bit
, end_bit
;
  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);
  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (NE_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
        return build2 (EQ_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
                       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));

      return build2 (code, truth_type, lhs, rhs);
    }
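
  /* For example, with plain `int a, b', the test `a == 0 && b == 0'
     becomes `(a | b) == 0': one OR plus one compare instead of two
     conditional branches.  (Illustration only; profitability is gated
     by BRANCH_COST above.)  */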
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;
  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }
  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
                         size_int (xrl_bitpos), 0);
  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, ll_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, rl_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);
      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }
      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert (rntype, lhs);
                  ll_mask = fold_convert (rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert (lntype, rhs);
                  lr_mask = fold_convert (lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2 (wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning ("`or' of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning ("`and' of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);

  return build2 (wanted_code, truth_type, result,
                 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
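
/* A worked illustration of the constant case above (hypothetical layout,
   not from the original sources): for

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test `p->a == 2 && p->b == 4' merges into one byte-sized
   bit-field reference compared against the OR of the shifted
   constants -- e.g. 0x42 when A occupies the low nibble; the exact
   constant depends on endianness and field layout.  */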
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
        invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
        fold (build2 (TRUTH_ORIF_EXPR, type,
                      optimize_minmax_comparison
                      (build2 (EQ_EXPR, type, arg0, comp_const)),
                      optimize_minmax_comparison
                      (build2 (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold (build2 (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5  */
        return fold (build2 (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold (build2 (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold (build2 (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold (build2 (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold (build2 (GT_EXPR, type, inner, comp_const));

    default:
      return t;
    }
}
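
/* For instance, `MAX (x, 0) >= 5' goes through the GE_EXPR case above:
   it is rewritten as `MAX (x, 0) == 5 || MAX (x, 0) > 5', and the
   EQ_EXPR and GT_EXPR cases then reduce that to `x == 5 || x > 5'.  */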
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
           || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
           || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
           || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
          /* ... and is unsigned, and its type is smaller than ctype,
             then we cannot pass through as widening.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (GET_MODE_SIZE (TYPE_MODE (ctype))
                   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
              /* ... or its type is larger than ctype,
                 then we cannot pass through this truncation.  */
              || (GET_MODE_SIZE (TYPE_MODE (ctype))
                  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE))))
        return t1;
      break;
    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

          return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
                               fold_convert (ctype, t2)));
        }
      break;
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && ! TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype, fold_convert (ctype, op0), t1),
                               c, code, wide_type);
      break;
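
      /* E.g. for the shift case above (illustration only): `(X << 3) / 4'
         is treated as `(X * 8) / 4', which the recursive call reduces
         to `X * 2'.  */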
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
                             fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && ! flag_wrapv))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold (build2 (tcode, ctype,
                             fold (build2 (code, ctype,
                                           fold_convert (ctype, op0),
                                           fold_convert (ctype, c))),
                             op1));

      break;
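
      /* Examples for the PLUS_EXPR/MINUS_EXPR case above (illustration
         only): `(X * 4 + 8) / 4' has both addends divisible by 4 and
         folds to `X + 2'; `(X + 7) * 4' takes the distributive-law
         path and becomes `X * 4 + 28'.  */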
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
                             fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
        return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
                             fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;
      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && ! TREE_OVERFLOW (t1))
        return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation of CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
                                 fold_convert (ctype,
                                               const_binop (TRUNC_DIV_EXPR,
                                                            op1, c, 0))));
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            return fold (build2 (code, ctype, fold_convert (ctype, op0),
                                 fold_convert (ctype,
                                               const_binop (TRUNC_DIV_EXPR,
                                                            c, op1, 0))));
        }
      break;

    default:
      break;
    }

  return 0;
}
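
/* An example of the "cancel" case above (illustration only): for
   `(X * 4) / 2' we have tcode == MULT_EXPR, code == TRUNC_DIV_EXPR,
   op1 == 4 and c == 2; since 4 % 2 == 0, the result is X * (4 / 2),
   i.e. `X * 2' (subject to the signedness restrictions above).  */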
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

static tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (value ? integer_one_node
                                             : integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
                                     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (lhs == 0)
    lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
                             : build2 (code, type, arg, true_value));
  if (rhs == 0)
    rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
                             : build2 (code, type, arg, false_value));

  test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
  return fold_convert (type, test);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
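
/* Concretely: when signed zeros are honored, `x + 0.0' is not folded to
   `x' (if x is -0.0, the sum is +0.0), but `x - 0.0' still is, unless
   sign-dependent rounding matters (rounding towards -infinity gives
   0.0 - 0.0 == -0.0).  Treating `x + (-0.0)' as `x - 0.0' makes it
   foldable as well.  */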
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold (build2 (GE_EXPR, type, arg,
                               build_real (TREE_TYPE (arg), dconst0)));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold (build2 (EQ_EXPR, type, arg,
                                     build_real (TREE_TYPE (arg), c2)));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold (build2 (code, type, arg,
                               build_real (TREE_TYPE (arg), c2)));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold (build2 (NE_EXPR, type, arg,
                                     build_real (TREE_TYPE (arg), c2)));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold (build2 (GE_EXPR, type, arg,
                                     build_real (TREE_TYPE (arg), dconst0)));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold (build2 (TRUTH_ANDIF_EXPR, type,
                                   fold (build2 (GE_EXPR, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             dconst0))),
                                   fold (build2 (NE_EXPR, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             c2)))));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold (build2 (code, type, arg,
                                 build_real (TREE_TYPE (arg), c2)));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold (build2 (TRUTH_ANDIF_EXPR, type,
                                   fold (build2 (GE_EXPR, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             dconst0))),
                                   fold (build2 (code, type, arg,
                                                 build_real (TREE_TYPE (arg),
                                                             c2)))));
            }
        }
    }

  return NULL_TREE;
}
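
/* A numeric illustration of the sqrt rules above: `sqrt (x) > 2.0'
   becomes `x > 4.0' when 4.0 is representable in the mode; for a huge y
   whose square overflows to +Inf, `sqrt (x) > y' becomes `x == +Inf'
   when infinities are honored.  */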
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNANs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold (build2 (EQ_EXPR, type, arg0, arg0));
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
                           arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
                           arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
                             arg0, build_real (TREE_TYPE (arg0), max)));

      /* The transformation below creates non-gimple code and thus is
         not appropriate if we are in gimple form.  */
      if (in_gimple_form)
        return NULL_TREE;

      temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
                           arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
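
/* For example, in double precision `x >= HUGE_VAL' (i.e. x >= +Inf)
   becomes `x > DBL_MAX', and `x < HUGE_VAL' becomes `x <= DBL_MAX';
   for -Inf the sense of the comparison is swapped first.  */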
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
                         TREE_INT_CST_HIGH (arg01),
                         TREE_INT_CST_LOW (arg1),
                         TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_2 (lpart, hpart);
  TREE_TYPE (prod) = TREE_TYPE (arg00);
  TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
                         || TREE_INT_CST_HIGH (prod) != hpart
                         || TREE_INT_CST_LOW (prod) != lpart;
  TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
                             TREE_INT_CST_HIGH (prod),
                             TREE_INT_CST_LOW (tmp),
                             TREE_INT_CST_HIGH (tmp),
                             &lpart, &hpart);
      hi = build_int_2 (lpart, hpart);
      TREE_TYPE (hi) = TREE_TYPE (arg00);
      TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
                           || TREE_INT_CST_HIGH (hi) != hpart
                           || TREE_INT_CST_LOW (hi) != lpart
                           || TREE_OVERFLOW (prod);
      TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          abort ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          abort ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold (build2 (GE_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
        return fold (build2 (LE_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold (build2 (LT_EXPR, type, arg00, lo));
      if (TREE_OVERFLOW (lo))
        return fold (build2 (GT_EXPR, type, arg00, hi));
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (LT_EXPR, type, arg00, lo));

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (LE_EXPR, type, arg00, hi));

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        return omit_one_operand (type, integer_zero_node, arg00);
      return fold (build2 (GT_EXPR, type, arg00, hi));

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        return omit_one_operand (type, integer_one_node, arg00);
      return fold (build2 (GE_EXPR, type, arg00, lo));

    default:
      break;
    }

  return NULL_TREE;
}
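
/* A worked example (unsigned arithmetic, illustration only): for
   `x / 4 == 2' we get prod == 8 and hi == 8 + 3 == 11, so the test
   becomes the range check `8 <= x && x <= 11'; likewise `x / 4 < 2'
   becomes `x < 8'.  */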
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
          return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                               result_type, fold_convert (stype, arg00),
                               fold_convert (stype, integer_zero_node)));
        }

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = build2 (BIT_XOR_EXPR, intermediate_type,
                        inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
                      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
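
/* For instance, `(x & 8) != 0' becomes `(x >> 3) & 1' (in the chosen
   intermediate type), and `(x & 8) == 0' gets an extra XOR with one;
   when the mask is the sign bit of x, the earlier special case yields
   `x < 0' instead.  */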
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  return 0;
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  const tree type = TREE_TYPE (expr);
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  int kind = TREE_CODE_CLASS (code);

  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  /* Don't try to process an RTL_EXPR since its operands aren't trees.
     Likewise for a SAVE_EXPR that's already been evaluated.  */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
    return t;

  /* Return right away if a constant.  */
  if (kind == 'c')
    return t;
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
        STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
        subop = TREE_REALPART (arg0);
      else
        subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = first_rtl_op (code);
      int i;
      for (i = 0; i < len; i++)
        {
          tree op = TREE_OPERAND (t, i);
          tree subop;

          if (op == 0)
            continue;           /* Valid for CALL_EXPR, at least.  */

          /* Strip any conversions that don't change the mode.  This is
             safe for every expression, except for a comparison expression
             because its signedness is derived from its operands.  So, in
             the latter case, only strip conversions that don't change the
             signedness.

             Note that this is done as an internal manipulation within the
             constant folder, in order to find the simplest representation
             of the arguments so that their form can be studied.  In any
             cases, the appropriate type conversions should be put back in
             the tree that will get out of the constant folder.  */
          if (kind == '<')
            STRIP_SIGN_NOPS (op);
          else
            STRIP_NOPS (op);

          if (TREE_CODE (op) == COMPLEX_CST)
            subop = TREE_REALPART (op);
          else
            subop = op;

          if (TREE_CODE (subop) != INTEGER_CST
              && TREE_CODE (subop) != REAL_CST)
            /* Note that TREE_CONSTANT isn't enough:
               static var addresses are constant but we can't
               do arithmetic on them.  */
            wins = 0;

          if (i == 0)
            arg0 = op;
          else if (i == 1)
            arg1 = op;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build2 (code, type, TREE_OPERAND (t, 1),
                         TREE_OPERAND (t, 0)));

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                          : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                          : TRUTH_XOR_EXPR,
                          type, fold_convert (boolean_type_node, arg0),
                          fold_convert (boolean_type_node, arg1)));

      if (code == EQ_EXPR)
        tem = invert_truthvalue (tem);

      return tem;
    }
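
  /* E.g. `(a < b) & (c < d)' is rebuilt here as the truth operation
     `(a < b) && (c < d)' (a TRUTH_AND_EXPR), and `(a < b) == (c < d)'
     becomes the inversion of a TRUTH_XOR_EXPR, before further folding.  */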
  if (TREE_CODE_CLASS (code) == '1')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold (build1 (code, type, arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold (build1 (code, type, arg02));
          tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                              arg01, arg02));

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                    && (INTEGRAL_TYPE_P
                        (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                    && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
            tem = build1 (code, type,
                          build3 (COND_EXPR,
                                  TREE_TYPE (TREE_OPERAND
                                             (TREE_OPERAND (tem, 1), 0)),
                                  TREE_OPERAND (tem, 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
          return tem;
        }
      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
        {
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            {
              arg0 = copy_node (arg0);
              TREE_TYPE (arg0) = type;
              return arg0;
            }
          else if (TREE_CODE (type) != INTEGER_TYPE)
            return fold (build3 (COND_EXPR, type, arg0,
                                 fold (build1 (code, type,
                                               integer_one_node)),
                                 fold (build1 (code, type,
                                               integer_zero_node))));
        }
    }
  else if (TREE_CODE_CLASS (code) == '<'
           && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                   fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == '<'
           && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                   fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == '2'
           || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold (build2 (code, type, TREE_OPERAND (arg0, 1),
                                     arg1)));
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                       fold (build2 (code, type,
                                     arg0, TREE_OPERAND (arg1, 1))));

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
        {
          tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
        {
          tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
        return TREE_OPERAND (t, 0);
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float))
              && inter_prec >= inside_prec
              && (inter_float || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold (build1 (code, type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
        }
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  tem = copy_node (t);
	  TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    return fold (build2 (BIT_AND_EXPR, type,
				 fold_convert (type, and0),
				 fold_convert (type, and1)));
	}
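      /* For example, for 32-bit int x, (unsigned long) (x & 0xff)
	 becomes (unsigned long) x & 0xff: the constant leaves the sign
	 bit of the inner type clear, so widening first cannot change
	 the result.  */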
      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = TREE_TYPE (t);
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : t;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return t;
    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
	{
	  tem = copy_node (t);
	  TREE_CONSTANT (tem) = wins;
	  TREE_INVARIANT (tem) = wins;
	  return tem;
	}
      return t;
    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return fold_convert (type, negate_expr (arg0));
      return t;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold (build1 (ABS_EXPR,
						     TREE_TYPE (targ0),
						     targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;
      return t;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build2 (COMPLEX_EXPR, type,
		       TREE_OPERAND (arg0, 0),
		       negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (CONJ_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || (TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;

	      if (TREE_CODE (arg0) == PLUS_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build2 (PLUS_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, parg0),
						   fold_convert (type, marg))),
				     fold_convert (type, parg1)));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build2 (PLUS_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, parg1),
						   fold_convert (type, marg))),
				     fold_convert (type, parg0)));
	    }
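	  /* For example, i*12 + j*4 is rewritten below as (i*3 + j)*4,
	     pulling the common power-of-two factor out of the sum.  */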
	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build2 (MULT_EXPR, type, arg00,
					   build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build2 (MULT_EXPR, type,
				     fold (build2 (PLUS_EXPR, type,
						   fold_convert (type, alt0),
						   fold_convert (type, alt1))),
				     fold_convert (type, same)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold (build2 (MULT_EXPR, type, arg0,
				 build_real (type, dconst2)));

	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build2 (MULT_EXPR, type, arg1,
				   build_real (type, c)));
	    }
	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build2 (MULT_EXPR, type, arg0,
				   build_real (type, c)));
	    }
	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold (build2 (MULT_EXPR, type,
				   TREE_OPERAND (arg0, 0),
				   build_real (type, c1)));
	    }
	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
		  return fold (build2 (PLUS_EXPR, type, tree0, tree11));
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
		  return fold (build2 (PLUS_EXPR, type, tree00, tree0));
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			     code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build2 ((code0 == LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
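      /* For example, for 32-bit unsigned x, (x << 3) + (x >> 29)
	 matches the first pattern above and becomes x rotated left
	 by 3 bits.  */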
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}
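      /* For example, (x + 1) + (y + 2) is reassociated here into
	 (x + y) + 3, so that the two literals combine.  */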
    binary:
      t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != type)
	    t1 = fold_convert (type, t1);

	  return t1;
	}
      return t;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
			     TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 0))),
				     arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build2 (BIT_AND_EXPR, type,
				     fold (build1 (BIT_NOT_EXPR, type,
						   TREE_OPERAND (arg1, 1))),
				     arg0));
	    }
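	  /* For example, x - (x & 7) becomes ~7 & x, clearing the
	     low three bits of x.  */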
	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build2 (BIT_XOR_EXPR, type,
				      TREE_OPERAND (arg0, 0), mask1));
		  return fold (build2 (MINUS_EXPR, type, tem, mask1));
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
      if (TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR
	  && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  /* (A * C) - (B * C) -> (A-B) * C.  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
			       TREE_OPERAND (arg1, 1), 0))
	    return fold (build2 (MULT_EXPR, type,
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 0),
					       TREE_OPERAND (arg1, 0))),
				 TREE_OPERAND (arg0, 1)));
	  /* (A * C1) - (A * C2) -> A * (C1-C2).  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0))
	    return fold (build2 (MULT_EXPR, type,
				 TREE_OPERAND (arg0, 0),
				 fold (build2 (MINUS_EXPR, type,
					       TREE_OPERAND (arg0, 1),
					       TREE_OPERAND (arg1, 1)))));
	}

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (MULT_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (MULT_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (a * (1 << b)) is (a << b).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg0,
				 TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build2 (LSHIFT_EXPR, type, arg1,
				 TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
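	  /* extract_muldiv combines nested multiplications by
	     constants, e.g. rewriting (x * 4) * 8 as x * 32.  */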
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert (type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build2 (RDIV_EXPR, type, tem,
				     TREE_OPERAND (arg0, 1)));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (rootfn, arglist);
		}
	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build2 (PLUS_EXPR, type,
				     TREE_VALUE (TREE_OPERAND (arg0, 1)),
				     TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build2 (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_AND_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build2 (BIT_IOR_EXPR, type,
				       TREE_OPERAND (arg0, 0),
				       TREE_OPERAND (arg1, 0))));
	}

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build2 (RDIV_EXPR, type,
			     TREE_OPERAND (arg0, 0),
			     negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build2 (RDIV_EXPR, type,
			     negate_expr (arg0),
			     TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build2 (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build2 (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			     fold (build2 (MULT_EXPR, type,
					   TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build2 (MULT_EXPR, type,
			     fold (build2 (RDIV_EXPR, type, arg0,
					   TREE_OPERAND (arg1, 0))),
			     TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build2 (RDIV_EXPR, type, tem,
				 TREE_OPERAND (arg1, 0)));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE,
					      fold_convert (type, arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold_convert (type, negate_expr (arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build2 (MULT_EXPR, type, arg0, arg1));
	    }
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }
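	  /* mathfn_built_in selects the variant matching TYPE, so
	     e.g. sinf(x)/cosf(x) becomes tanf(x).  */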
	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build2 (RDIV_EXPR, type,
				       build_real (type, dconst1), tmp));
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return t;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert (type, negate_expr (arg0));

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
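      /* For example, a FLOOR_DIV_EXPR dividing x * 8 by 8 is known to
	 be exact, and EXACT_DIV_EXPR lets expmed.c use a cheaper
	 multiplicative inverse instead of a full division.  */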
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand (type, integer_zero_node, arg0);

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold (build2 (RROTATE_EXPR, type, arg0, tem));
	}
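      /* For example, a rotate left by 8 of a 32-bit value is
	 canonicalized here to a rotate right by 24.  */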
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build2 (code, type,
					   TREE_OPERAND (arg0, 0), arg1)),
			     fold (build2 (code, type,
					   TREE_OPERAND (arg0, 1), arg1))));
      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      return t;
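      /* For example, rotating a 32-bit value right by 10 and then
	 right by 22 leaves the value unchanged.  */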
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      return t;
    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand (type, arg1, arg0);
      return t;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	{
	  tem = fold_single_bit_test (code, arg0, arg1, type);
	  if (tem)
	    return tem;
	  return t;
	}
      return fold_convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build2 (TREE_CODE (arg0), type, a00,
				 fold (build2 (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build2 (TREE_CODE (arg0), type, a00,
				 fold (build2 (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build2 (TREE_CODE (arg0), type, a01,
				 fold (build2 (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build2 (TREE_CODE (arg0), type,
				 fold (build2 (code, type, a00, a10)),
				 a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);
      return t;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));

      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	return constant_boolean_node (code != EQ_EXPR, type);

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
				      ? code == EQ_EXPR : code != EQ_EXPR,
				      type);
)))
7410 tree targ0
= strip_float_extensions (arg0
);
7411 tree targ1
= strip_float_extensions (arg1
);
7412 tree newtype
= TREE_TYPE (targ0
);
7414 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
7415 newtype
= TREE_TYPE (targ1
);
7417 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7418 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
7419 return fold (build2 (code
, type
, fold_convert (newtype
, targ0
),
7420 fold_convert (newtype
, targ1
)));
	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
				 TREE_OPERAND (arg0, 0)));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold (build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst))));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold (build2 (code, type, arg0,
				     build_real (TREE_TYPE (arg1), dconst0)));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  tem = (code == NE_EXPR)
			? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, tem, arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build2 (swap_tree_comparison (code), type,
				 TREE_OPERAND (arg0, 1), tem));
	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
	      tree folded_compare, shift;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold (build2 (code, type,
					     TREE_OPERAND (varop, 0),
					     arg1));
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
				   0);
	      newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				       newconst, shift));
	      newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				       newconst, shift));
	    }

	  return fold (build2 (code, type, varop, newconst));
	}
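      /* For example, i++ == 5 is rewritten as ++i == 6, so the old
	 value of i need not be kept around for the comparison.  */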
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build2 (GT_EXPR, type, arg0, arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build2 (LE_EXPR, type, arg0, arg1));

	    default:
	      break;
	    }
	}
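      /* For example, x >= 1 becomes x > 0, the form the sign tests
	 below expect.  */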
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.

	 This is quite similar to fold_relational_hi_lo; however, my
	 attempts to share the code have been nothing but trouble.
	 I give up for now.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    unsigned HOST_WIDE_INT signed_max;
	    unsigned HOST_WIDE_INT max, min;

	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

	    if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	      {
		max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		min = 0;
	      }
	    else
	      {
		max = signed_max;
		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	      }

	    if (TREE_INT_CST_HIGH (arg1) == 0
		&& TREE_INT_CST_LOW (arg1) == max)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));

		case LE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold (build2 (NE_EXPR, type, arg0, arg1));

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == max - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (NE_EXPR, type, arg0, arg1));
		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));

		case GE_EXPR:
		  return omit_one_operand (type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold (build2 (NE_EXPR, type, arg0, arg1));

		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (NE_EXPR, type, arg0, arg1));
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build2 (EQ_EXPR, type, arg0, arg1));
		default:
		  break;
		}
7691 else if (!in_gimple_form
7692 && TREE_INT_CST_HIGH (arg1
) == 0
7693 && TREE_INT_CST_LOW (arg1
) == signed_max
7694 && TYPE_UNSIGNED (TREE_TYPE (arg1
))
7695 /* signed_type does not work on pointer types. */
7696 && INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
7698 /* The following case also applies to X < signed_max+1
7699 and X >= signed_max+1 because previous transformations. */
7700 if (code
== LE_EXPR
|| code
== GT_EXPR
)
7703 st0
= lang_hooks
.types
.signed_type (TREE_TYPE (arg0
));
7704 st1
= lang_hooks
.types
.signed_type (TREE_TYPE (arg1
));
7706 (build2 (code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
7707 type
, fold_convert (st0
, arg0
),
7708 fold_convert (st1
, integer_zero_node
)));
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (code, type,
			     TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR
	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
	       && (code == EQ_EXPR || code == NE_EXPR
		   || TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (tem)))
	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
		   || (TREE_CODE (t1) == INTEGER_CST
		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
	return fold (build2 (code, type, tem,
			     fold_convert (TREE_TYPE (tem), t1)));
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return optimize_minmax_comparison (t);
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build2 (TRUTH_ANDIF_EXPR, type,
			     build2 (GE_EXPR, type,
				     TREE_OPERAND (arg0, 0), tem),
			     build2 (LE_EXPR, type,
				     TREE_OPERAND (arg0, 0), arg1)));
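
      /* Two sketches of the transformations above, assuming `int x':
	 `x + 4 == 10' becomes `x == 6' (the revised constant 6 does
	 not overflow), and `abs (x) <= 5' becomes the range test
	 `x >= -5 && x <= 5', avoiding the ABS operation.  */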
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    return
	      fold (build2 (code, type,
			    build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				    build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
					    arg01, TREE_OPERAND (arg00, 1)),
				    fold_convert (TREE_TYPE (arg0),
						  integer_one_node)),
			    arg1));
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build2 (code, type,
			    build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
				    build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
					    arg00, TREE_OPERAND (arg01, 1)),
				    fold_convert (TREE_TYPE (arg0),
						  integer_one_node)),
			    arg1));
	}
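
      /* E.g. `((1 << n) & flags) != 0' becomes
	 `((flags >> n) & 1) != 0': the variable operand is shifted
	 instead, so no separate instruction is needed to build the
	 shifted constant.  */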
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
	  tree newmod = build2 (TREE_CODE (arg0), newtype,
				fold_convert (newtype,
					      TREE_OPERAND (arg0, 0)),
				fold_convert (newtype,
					      TREE_OPERAND (arg0, 1)));

	  return build2 (code, type, newmod, fold_convert (newtype, arg1));
	}
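
      /* E.g. assuming a 32-bit `int x', `x % 4 == 0' becomes
	 `(unsigned int) x % 4U == 0'; for a power-of-2 modulus
	 compared against zero only the low-order bits matter, so the
	 signed and unsigned forms agree.  */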
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			     arg0, integer_zero_node));
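
      /* E.g. `(a & 8) == 8' becomes `(a & 8) != 0', which the
	 single-bit-test folding below can simplify further.  */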
      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
	 2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree dandnotc
	    = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			    arg1, build1 (BIT_NOT_EXPR,
					  TREE_TYPE (TREE_OPERAND (arg0, 1)),
					  TREE_OPERAND (arg0, 1))));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree candnotd
	    = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
			    TREE_OPERAND (arg0, 1),
			    build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			       TREE_OPERAND (arg1, 1)),
		       fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TYPE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		  fold_convert (TREE_TYPE (arg0),
				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
					TREE_OPERAND (TREE_OPERAND (arg1, 0),
						      1))),
		  fold_convert (TREE_TYPE (arg0), integer_zero_node));
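
      /* E.g. for `unsigned x', `x < (1U << n)' becomes
	 `(x >> n) == 0' and `x >= (1U << n)' becomes
	 `(x >> n) != 0'.  */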
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold (build2 (EQ_EXPR, type, arg0, arg1));

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      break;
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold (build2 (code, type,
				eval_subst (arg0, cval1, maxval,
					    cval2, minval),
				arg1));
	      tree equal_result
		= fold (build2 (code, type,
				eval_subst (arg0, cval1, maxval,
					    cval2, maxval),
				arg1));
	      tree low_result
		= fold (build2 (code, type,
				eval_subst (arg0, cval1, minval,
					    cval2, maxval),
				arg1));

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  tem = build2 (code, type, cval1, cval2);
		  if (save_p)
		    return save_expr (tem);
		  else
		    return fold (tem);
		}
	    }
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && lang_hooks.can_use_bit_fields_p ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* If this is a comparison of complex values and either or both sides
	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
	 This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
	  && (TREE_CODE (arg0) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg0) == COMPLEX_CST
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree real0, imag0, real1, imag1;

	  arg0 = save_expr (arg0);
	  arg1 = save_expr (arg1);
	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

	  return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
				: TRUTH_ORIF_EXPR),
			       type,
			       fold (build2 (code, type, real0, real1)),
			       fold (build2 (code, type, imag0, imag1))));
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN (fndecl)
	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    return fold (build2 (code, type,
				 build1 (INDIRECT_REF, char_type_node,
					 TREE_VALUE (arglist)),
				 integer_zero_node));
	}
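
      /* As a source-level sketch, this turns

	     if (strlen (p) == 0) ...

	 into

	     if (*p == 0) ...

	 which is valid because strlen returns zero exactly when the
	 first character is the terminating NUL.  */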
      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
	 into a single range test.  */
      if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !integer_zerop (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TREE_OVERFLOW (arg1))
	{
	  t1 = fold_div_compare (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
      if (!wins)
	return t;

      /* Both ARG0 and ARG1 are known to be constants at this point.  */
      t1 = fold_relational_const (code, type, arg0, arg1);
      return (t1 == NULL_TREE ? t : t1);
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.  */
	  if (! VOID_TYPE_P (TREE_TYPE (tem))
	      || VOID_TYPE_P (type))
	    return pedantic_non_lvalue (tem);
	  return t;
	}

      if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tree arg2 = TREE_OPERAND (t, 2);
	  enum tree_code comp_code = TREE_CODE (arg0);

	  STRIP_NOPS (arg2);

	  /* If we have A op 0 ? A : -A, consider applying the following
	     transformations:

	     A == 0? A : -A    same as -A
	     A != 0? A : -A    same as A
	     A >= 0? A : -A    same as abs (A)
	     A > 0?  A : -A    same as abs (A)
	     A <= 0? A : -A    same as -abs (A)
	     A < 0?  A : -A    same as -abs (A)

	     None of these transformations work for modes with signed
	     zeros.  If A is +/-0, the first two transformations will
	     change the sign of the result (from +0 to -0, or vice
	     versa).  The last four will fix the sign of the result,
	     even though the original expressions could be positive or
	     negative, depending on the sign of A.

	     Note that all these transformations are correct if A is
	     NaN, since the two alternatives (A and -A) are also NaNs.  */
	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
	       ? real_zerop (TREE_OPERAND (arg0, 1))
	       : integer_zerop (TREE_OPERAND (arg0, 1)))
	      && TREE_CODE (arg2) == NEGATE_EXPR
	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
		tem = fold_convert (type, negate_expr (tem));
		return pedantic_non_lvalue (tem);
	      case NE_EXPR:
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      case GE_EXPR:
	      case GT_EXPR:
		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = fold_convert (lang_hooks.types.signed_type
				       (TREE_TYPE (arg1)), arg1);
		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      case LE_EXPR:
	      case LT_EXPR:
		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = fold_convert (lang_hooks.types.signed_type
				       (TREE_TYPE (arg1)), arg1);
		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
		arg1 = negate_expr (fold_convert (type, arg1));
		return pedantic_non_lvalue (arg1);
	      default:
		break;
	      }

	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
	     both transformations are correct when A is NaN: A != 0
	     is then true, and A == 0 is false.  */

	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
	    {
	      if (comp_code == NE_EXPR)
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      else if (comp_code == EQ_EXPR)
		return pedantic_non_lvalue (fold_convert (type,
							  integer_zero_node));
	    }
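
	  /* E.g. `x > 0 ? x : -x' folds to `abs (x)', and
	     `x <= 0 ? x : -x' to `-abs (x)', per the table above.  */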
	  /* Try some transformations of A op B ? A : B.

	     A == B? A : B    same as B
	     A != B? A : B    same as A
	     A >= B? A : B    same as max (A, B)
	     A > B?  A : B    same as max (B, A)
	     A <= B? A : B    same as min (A, B)
	     A < B?  A : B    same as min (B, A)

	     As above, these transformations don't work in the presence
	     of signed zeros.  For example, if A and B are zeros of
	     opposite sign, the first two transformations will change
	     the sign of the result.  In the last four, the original
	     expressions give different results for (A=+0, B=-0) and
	     (A=-0, B=+0), but the transformed expressions do not.

	     The first two transformations are correct if either A or B
	     is a NaN.  In the first transformation, the condition will
	     be false, and B will indeed be chosen.  In the case of the
	     second transformation, the condition A != B will be true,
	     and A will be chosen.

	     The conversions to max() and min() are not correct if B is
	     a number and A is not.  The conditions in the original
	     expressions will be false, so all four give B.  The min()
	     and max() versions would give a NaN instead.  */
	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
					      arg2, TREE_OPERAND (arg0, 0)))
	    {
	      tree comp_op0 = TREE_OPERAND (arg0, 0);
	      tree comp_op1 = TREE_OPERAND (arg0, 1);
	      tree comp_type = TREE_TYPE (comp_op0);

	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
		{
		  comp_type = type;
		  comp_op0 = arg1;
		  comp_op1 = arg2;
		}

	      switch (comp_code)
		{
		case EQ_EXPR:
		  return pedantic_non_lvalue (fold_convert (type, arg2));
		case NE_EXPR:
		  return pedantic_non_lvalue (fold_convert (type, arg1));
		case LE_EXPR:
		case LT_EXPR:
		  /* In C++ a ?: expression can be an lvalue, so put the
		     operand which will be used if they are equal first
		     so that we can convert this back to the
		     corresponding COND_EXPR.  */
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue (fold_convert
		      (type, fold (build2 (MIN_EXPR, comp_type,
					   (comp_code == LE_EXPR
					    ? comp_op0 : comp_op1),
					   (comp_code == LE_EXPR
					    ? comp_op1 : comp_op0)))));
		  break;
		case GE_EXPR:
		case GT_EXPR:
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue (fold_convert
		      (type, fold (build2 (MAX_EXPR, comp_type,
					   (comp_code == GE_EXPR
					    ? comp_op0 : comp_op1),
					   (comp_code == GE_EXPR
					    ? comp_op1 : comp_op0)))));
		  break;
		default:
		  break;
		}
	    }
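
	  /* E.g. `a < b ? a : b' becomes `min (a, b)' and
	     `a >= b ? a : b' becomes `max (a, b)', provided NaNs need
	     not be honored for the operand type.  */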
	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
	     we might still be able to simplify this.  For example,
	     if C1 is one less or one more than C2, this might have started
	     out as a MIN or MAX and been transformed by this function.
	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

	  if (INTEGRAL_TYPE_P (type)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST)
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		/* We can replace A with C1 in this case.  */
		arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
		return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
				     TREE_OPERAND (t, 2)));

	      case LT_EXPR:
		/* If C1 is C2 + 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
				       OEP_ONLY_CONST)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0),
					OEP_ONLY_CONST))
		  return pedantic_non_lvalue
		    (fold (build2 (MIN_EXPR, type, arg1, arg2)));
		break;

	      case LE_EXPR:
		/* If C1 is C2 - 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
				       OEP_ONLY_CONST)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0),
					OEP_ONLY_CONST))
		  return pedantic_non_lvalue
		    (fold (build2 (MIN_EXPR, type, arg1, arg2)));
		break;

	      case GT_EXPR:
		/* If C1 is C2 - 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
				       OEP_ONLY_CONST)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0),
					OEP_ONLY_CONST))
		  return pedantic_non_lvalue
		    (fold (build2 (MAX_EXPR, type, arg1, arg2)));
		break;

	      case GE_EXPR:
		/* If C1 is C2 + 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
				       OEP_ONLY_CONST)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0),
					OEP_ONLY_CONST))
		  return pedantic_non_lvalue
		    (fold (build2 (MAX_EXPR, type, arg1, arg2)));
		break;

	      case NE_EXPR:
		break;

	      default:
		break;
	      }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
				TREE_OPERAND (t, 2), false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold (build3 (code, type, tem,
				 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
	  && integer_zerop (TREE_OPERAND (t, 2))
	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
	  && integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
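
      /* E.g. `flag ? 1 : 0' folds to `flag' itself when the types
	 match, and `flag ? 0 : 1' to `!flag'.  */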
      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
	 operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
						  arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
						      tem, arg1)));
	}

      return t;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return t;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, arg0, arg1);
      return t;
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (REALPART_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (REALPART_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build2 (TREE_CODE (arg0), type,
			     fold (build1 (IMAGPART_EXPR, type,
					   TREE_OPERAND (arg0, 0))),
			     fold (build1 (IMAGPART_EXPR, type,
					   TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
	 appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
	return TREE_OPERAND (t, 0);

      {
	enum tree_code code0 = TREE_CODE (arg0);
	int kind0 = TREE_CODE_CLASS (code0);
	tree arg00 = TREE_OPERAND (arg0, 0);
	tree arg01;

	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
	  return fold (build1 (code0, type,
			       fold (build1 (CLEANUP_POINT_EXPR,
					     TREE_TYPE (arg00), arg00))));

	if (kind0 == '<' || kind0 == '2'
	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
	    || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
	    || code0 == TRUTH_XOR_EXPR)
	  {
	    arg01 = TREE_OPERAND (arg0, 1);

	    if (TREE_CONSTANT (arg00)
		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
		    && ! has_cleanups (arg00)))
	      return fold (build2 (code0, type, arg00,
				   fold (build1 (CLEANUP_POINT_EXPR,
						 TREE_TYPE (arg01), arg01))));

	    if (TREE_CONSTANT (arg01))
	      return fold (build2 (code0, type,
				   fold (build1 (CLEANUP_POINT_EXPR,
						 TREE_TYPE (arg00), arg00)),
				   arg01));
	  }

	return t;
      }

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	{
	  tree tmp = fold_builtin (t);
	  if (tmp)
	    return tmp;
	}
      return t;

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'x':
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'e':
      switch (code)
	{
	case SAVE_EXPR: len = 2; break;
	case GOTO_SUBROUTINE_EXPR: len = 0; break;
	case RTL_EXPR: len = 0; break;
	case WITH_CLEANUP_EXPR: len = 2; break;
	default: break;
	}
      /* Fall through.  */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_AND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
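
	    /* E.g. `CASE_BUILTIN_F (BUILT_IN_SQRT)' expands to
	       `case BUILT_IN_SQRT: case BUILT_IN_SQRTF:
		case BUILT_IN_SQRTL:', covering the float and long
	       double variants of the builtin in one line.  */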
	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE
						 (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE
						 (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE
					      (TREE_CHAIN (arglist)));

	    default:
	      break;
	    }
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
      break;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      /* Weak declarations may link to NULL.  */
      if (DECL_P (TREE_OPERAND (t, 0)))
	return !DECL_WEAK (TREE_OPERAND (t, 0));
      /* Constants and all other cases are never weak.  */
      return true;

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}
/* See if we are applying CODE, a relational operator, to the highest or
   lowest possible integer of TYPE.  If so, then the result is a compile
   time constant.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
		       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
	  || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	{
	  max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
	  min = 0;
	}
      else
	{
	  max = signed_max;
	  min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	}

      if (TREE_INT_CST_HIGH (op1) == 0
	  && TREE_INT_CST_LOW (op1) == max)
	switch (code)
	  {
	  case GT_EXPR:
	    return omit_one_operand (type, integer_zero_node, op0);

	  case GE_EXPR:
	    *code_p = EQ_EXPR;
	    break;

	  case LE_EXPR:
	    return omit_one_operand (type, integer_one_node, op0);

	  case LT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  /* The GE_EXPR and LT_EXPR cases above are not normally
	     reached because of previous transformations.  */

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == max - 1)
	switch (code)
	  {
	  case GT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;

	  case LE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
	    break;

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min)
	switch (code)
	  {
	  case LT_EXPR:
	    return omit_one_operand (type, integer_zero_node, op0);

	  case LE_EXPR:
	    *code_p = EQ_EXPR;
	    break;

	  case GE_EXPR:
	    return omit_one_operand (type, integer_one_node, op0);

	  case GT_EXPR:
	    *code_p = NE_EXPR;
	    break;

	  default:
	    break;
	  }
      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
	       && TREE_INT_CST_LOW (op1) == min + 1)
	switch (code)
	  {
	  case GE_EXPR:
	    *code_p = NE_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;

	  case LT_EXPR:
	    *code_p = EQ_EXPR;
	    *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
	    break;

	  default:
	    break;
	  }

      else if (TREE_INT_CST_HIGH (op1) == 0
	       && TREE_INT_CST_LOW (op1) == signed_max
	       && TYPE_UNSIGNED (TREE_TYPE (op1))
	       /* signed_type does not work on pointer types.  */
	       && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	{
	  /* The following case also applies to X < signed_max+1
	     and X >= signed_max+1 because of previous transformations.  */
	  if (code == LE_EXPR || code == GT_EXPR)
	    {
	      tree st0, st1, exp, retval;
	      st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
	      st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

	      exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			    type,
			    fold_convert (st0, op0),
			    fold_convert (st1, integer_zero_node));

	      retval
		= nondestructive_fold_binary_to_constant (TREE_CODE (exp),
							  TREE_TYPE (exp),
							  TREE_OPERAND (exp, 0),
							  TREE_OPERAND (exp, 1));

	      /* If we are in gimple form, then returning EXP would create
		 non-gimple expressions.  Clearing it is safe and ensures
		 we do not allow a non-gimple expression to escape.  */
	      if (in_gimple_form)
		exp = NULL;

	      return (retval ? retval : exp);
	    }
	}
    }

  return NULL_TREE;
}
9419 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9420 attempt to fold the expression to a constant without modifying TYPE,
9423 If the expression could be simplified to a constant, then return
9424 the constant. If the expression would not be simplified to a
9425 constant, then return NULL_TREE.
9427 Note this is primarily designed to be called after gimplification
9428 of the tree structures and when at least one operand is a constant.
9429 As a result of those simplifying assumptions this routine is far
9430 simpler than the generic fold routine. */
9433 nondestructive_fold_binary_to_constant (enum tree_code code
, tree type
,
9441 /* If this is a commutative operation, and ARG0 is a constant, move it
9442 to ARG1 to reduce the number of tests below. */
9443 if (commutative_tree_code (code
)
9444 && (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
))
9451 /* If either operand is a complex type, extract its real component. */
9452 if (TREE_CODE (op0
) == COMPLEX_CST
)
9453 subop0
= TREE_REALPART (op0
);
9457 if (TREE_CODE (op1
) == COMPLEX_CST
)
9458 subop1
= TREE_REALPART (op1
);
9462 /* Note if either argument is not a real or integer constant.
9463 With a few exceptions, simplification is limited to cases
9464 where both arguments are constants. */
9465 if ((TREE_CODE (subop0
) != INTEGER_CST
9466 && TREE_CODE (subop0
) != REAL_CST
)
9467 || (TREE_CODE (subop1
) != INTEGER_CST
9468 && TREE_CODE (subop1
) != REAL_CST
))
9474 /* (plus (address) (const_int)) is a constant. */
9475 if (TREE_CODE (op0
) == PLUS_EXPR
9476 && TREE_CODE (op1
) == INTEGER_CST
9477 && (TREE_CODE (TREE_OPERAND (op0
, 0)) == ADDR_EXPR
9478 || (TREE_CODE (TREE_OPERAND (op0
, 0)) == NOP_EXPR
9479 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0
, 0), 0))
9481 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
9483 return build2 (PLUS_EXPR
, type
, TREE_OPERAND (op0
, 0),
9484 const_binop (PLUS_EXPR
, op1
,
9485 TREE_OPERAND (op0
, 1), 0));
9493 /* Both arguments are constants. Simplify. */
9494 tem
= const_binop (code
, op0
, op1
, 0);
9495 if (tem
!= NULL_TREE
)
9497 /* The return value should always have the same type as
9498 the original expression. */
9499 if (TREE_TYPE (tem
) != type
)
9500 tem
= fold_convert (type
, tem
);
9507 /* Fold &x - &x. This can happen from &x.foo - &x.
9508 This is unsafe for certain floats even in non-IEEE formats.
9509 In IEEE, it is unsafe because it does wrong for NaNs.
9510 Also note that operand_equal_p is always false if an
9511 operand is volatile. */
9512 if (! FLOAT_TYPE_P (type
) && operand_equal_p (op0
, op1
, 0))
9513 return fold_convert (type
, integer_zero_node
);
9519 /* Special case multiplication or bitwise AND where one argument
9521 if (! FLOAT_TYPE_P (type
) && integer_zerop (op1
))
9522 return omit_one_operand (type
, op1
, op0
);
9524 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0
)))
9525 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0
)))
9526 && real_zerop (op1
))
9527 return omit_one_operand (type
, op1
, op0
);
9532 /* Special case when we know the result will be all ones. */
9533 if (integer_all_onesp (op1
))
9534 return omit_one_operand (type
, op1
, op0
);
9538 case TRUNC_DIV_EXPR
:
9539 case ROUND_DIV_EXPR
:
9540 case FLOOR_DIV_EXPR
:
9542 case EXACT_DIV_EXPR
:
9543 case TRUNC_MOD_EXPR
:
9544 case ROUND_MOD_EXPR
:
9545 case FLOOR_MOD_EXPR
:
9548 /* Division by zero is undefined. */
9549 if (integer_zerop (op1
))
9552 if (TREE_CODE (op1
) == REAL_CST
9553 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1
)))
9554 && real_zerop (op1
))
9560 if (INTEGRAL_TYPE_P (type
)
9561 && operand_equal_p (op1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
9562 return omit_one_operand (type
, op1
, op0
);
9567 if (INTEGRAL_TYPE_P (type
)
9568 && TYPE_MAX_VALUE (type
)
9569 && operand_equal_p (op1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
9570 return omit_one_operand (type
, op1
, op0
);
9575 /* Optimize -1 >> x for arithmetic right shifts. */
9576 if (integer_all_onesp (op0
) && ! TYPE_UNSIGNED (type
))
9577 return omit_one_operand (type
, op0
, op1
);
9578 /* ... fall through ... */
9581 if (integer_zerop (op0
))
9582 return omit_one_operand (type
, op0
, op1
);
9584 /* Since negative shift count is not well-defined, don't
9585 try to compute it in the compiler. */
9586 if (TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sgn (op1
) < 0)
9593 /* -1 rotated either direction by any amount is still -1. */
9594 if (integer_all_onesp (op0
))
9595 return omit_one_operand (type
, op0
, op1
);
9597 /* 0 rotated either direction by any amount is still zero. */
9598 if (integer_zerop (op0
))
9599 return omit_one_operand (type
, op0
, op1
);
9605 return build_complex (type
, op0
, op1
);
9614 /* If one arg is a real or integer constant, put it last. */
9615 if ((TREE_CODE (op0
) == INTEGER_CST
9616 && TREE_CODE (op1
) != INTEGER_CST
)
9617 || (TREE_CODE (op0
) == REAL_CST
9618 && TREE_CODE (op0
) != REAL_CST
))
9625 code
= swap_tree_comparison (code
);
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (op0) != INTEGER_CST
          && tree_int_cst_sgn (op1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              code = GT_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            default:
              break;
            }
        }
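      /* E.g. x >= 3 becomes x > 2 and x < 3 becomes x <= 2, the
         canonical forms expected by the later comparison folds.  */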
      tem = fold_relational_hi_lo (&code, type, &op0, &op1);
      if (tem)
        return tem;

      return fold_relational_const (code, type, op0, op1);
      /* This could probably be handled.  */
    case TRUTH_AND_EXPR:
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (true, type);
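      /* If control reaches the final test, both operands are integer
         constants and neither is zero, so the conjunction is true.  */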
    case TRUTH_OR_EXPR:
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (false, type);
    case TRUTH_XOR_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        {
          int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
          return constant_boolean_node (x, type);
        }
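      /* The ! operators normalize each operand to 0 or 1 before the
         XOR, so TRUTH_XOR (4, 0) folds to true while TRUTH_XOR (4, 7)
         folds to false.  */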
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */

tree
nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
                                       tree op0)
{
  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      if (TREE_CODE (op0) == COMPLEX_CST)
        subop = TREE_REALPART (op0);
      else
        subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
        return NULL_TREE;
    }
  switch (code)
    {
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
      return fold_convert_const (code, type, op0);
    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_negate_const (op0, type);
    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_abs_const (op0, type);
    case BIT_NOT_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST)
        return fold_not_const (op0, type);
    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_REALPART (op0);
    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_IMAGPART (op0);
    case CONJ_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST
          && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
        return build_complex (type, TREE_REALPART (op0),
                              negate_expr (TREE_IMAGPART (op0)));
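      /* The conjugate of a complex constant negates only the
         imaginary part: conj (a + bi) is a - bi.  */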
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
          tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_2 ((TREE_STRING_POINTER (string)
                                           [TREE_INT_CST_LOW (index)]), 0));
    }
  return NULL;
}
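/* As an illustration, both "hello"[1] and *("hello" + 1) fold to the
   character constant 'e', provided the string's element type has a
   single-byte integer mode.  */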
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                 TREE_INT_CST_HIGH (arg0),
                                 &low, &high);
      t = build_int_2 (low, high);
      TREE_TYPE (t) = type;
      TREE_OVERFLOW (t)
        = (TREE_OVERFLOW (arg0)
           | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
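/* Note that integer constants carry their value as a (low, high) pair
   of HOST_WIDE_INTs, so negation goes through neg_double and
   force_fit_type instead of ordinary arithmetic, propagating ARG0's
   overflow flags.  */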
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg0)
               | force_fit_type (t, overflow));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
        }
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    {
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
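/* E.g. abs (-5) is computed via neg_double, abs (5) and any unsigned
   value are returned unchanged, and abs of the most negative integer
   sets the overflow flags because its negation is unrepresentable.  */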
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
                       ~ TREE_INT_CST_HIGH (arg0));
      TREE_TYPE (t) = type;
      force_fit_type (t, 0);
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
      TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case that it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
        tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
                            ? INT_CST_LT_UNSIGNED (op0, op1)
                            : INT_CST_LT (op0, op1)),
                           0);
    }

  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
           && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
         exceptions: First, an NE_EXPR is true on NaNs, but that case
         is already handled correctly since we will be inverting the
         result for NE_EXPR.  Second, if we had inverted a LE_EXPR
         or a GE_EXPR into a LT_EXPR, we must return true so that it
         will be inverted into false.  */

      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
          || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
        tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
        tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
                                              TREE_REAL_CST (op1)),
                           0);
      else
        tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
                                             TREE_REAL_CST (op1)),
                           0);
    }

  if (tem == NULL_TREE)
    return NULL_TREE;

  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (tem);
  return tem;
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (TREE_CODE (base) == COMPONENT_REF
             || TREE_CODE (base) == ARRAY_REF)
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}
tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
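/* E.g. building the address of *p folds back to p itself, possibly
   wrapped in a NOP_EXPR cast, while taking the address of a component
   or array reference marks the underlying decl addressable.  */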
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build2 (ARRAY_REF, type, op, size_zero_node);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build2 (ARRAY_REF, type, sub, size_zero_node);
    }

  return build1 (INDIRECT_REF, type, t);
}
#include "gt-fold-const.h"