/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

static tree fold_negate_const (tree, tree);
static tree fold_abs_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
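
/* The encoding packs LT into bit 0, EQ into bit 1 and GT into bit 2,
   so every compound operator is the bitwise OR of its components:
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3) and
   COMPCODE_NE == COMPCODE_LT | COMPCODE_GT (1 | 4 == 5).  ANDing or
   ORing two codes therefore combines the comparisons directly, e.g.
   (A < B) || (A == B) folds to the code 1 | 2 == COMPCODE_LE.  */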
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
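
/* For example, with 8-bit values for illustration: 100 + 100 wraps to
   -56 in two's complement.  Both addends are positive, so ~(a ^ b)
   has the sign bit set, and a ^ sum also has the sign bit set (a is
   positive, sum is negative), making the AND negative: overflow is
   reported.  When a and b have opposite signs the true sum always
   fits, and ~(a ^ b) has a clear sign bit, so the macro yields 0.  */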
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
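
/* As a concrete illustration, on a host where HOST_BITS_PER_WIDE_INT
   is 32: BASE is 0x10000, LOWPART (0x12345678) is 0x5678 and
   HIGHPART (0x12345678) is 0x1234, so the value is recovered as
   0x5678 + 0x1234 * 0x10000.  Each half-word "digit" fits in 16 bits,
   which lets the multiply and divide routines below do schoolbook
   digit arithmetic without overflowing a HOST_WIDE_INT.  */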
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
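
/* encode and decode are exact inverses: after encode (words, low, hi),
   decode (words, &low2, &hi2) returns the original pair, since
   words[0] + words[1] * BASE reassembles LOW and words[2] + words[3]
   * BASE reassembles HI.  */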
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
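
/* For example, fitting the value 0xFF into an 8-bit signed type:
   prec is 8 and bit prec - 1 is set, so every bit outside the low 8
   is set to 1 and the constant becomes -1.  The stored words changed,
   so the function reports overflow; it also reports overflow whenever
   OVERFLOW was already nonzero on entry.  */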
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
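
/* The carry out of the low word is recovered without a wider type:
   in unsigned arithmetic l = l1 + l2 wraps around exactly when the
   true sum exceeds the word, and the wrapped result is then smaller
   than either addend, so (l < l1) is 1 precisely when a carry into
   the high word is needed.  */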
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);        /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
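
/* The digit loop above is schoolbook multiplication in base BASE:
   each product arg1[i] * arg2[j] of two half-word digits fits in one
   HOST_WIDE_INT (for 16-bit digits, at most 0xFFFF * 0xFFFF ==
   0xFFFE0001, per the bound noted in the loop), so adding the running
   carry and the digit already stored in prod[i + j] still cannot wrap.  */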
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
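
/* Note the split shift (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1)
   instead of l1 >> (HOST_BITS_PER_WIDE_INT - count): when COUNT is 0
   the latter would shift by the full word width, which C leaves
   undefined, while the two-step form shifts by at most the width minus
   one and still yields 0 in that case.  */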
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
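
/* Rounding examples for -7 divided by 2, as (quotient, remainder):
   TRUNC_DIV_EXPR rounds toward zero and gives (-3, -1);
   FLOOR_DIV_EXPR rounds toward negative infinity and gives (-4, 1);
   CEIL_DIV_EXPR rounds toward positive infinity and gives (-3, -1);
   ROUND_DIV_EXPR rounds to the nearest integer, away from zero on
   ties, and gives (-4, 1).  In every case num == quo * den + rem.  */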
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TREE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 1)),
                                              TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 0)),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TREE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
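
/* The RSHIFT_EXPR case relies on arithmetic shifts filling with the
   sign bit: for a 32-bit int, (int) x >> 31 is 0 or -1, so its
   negation is 0 or 1, which is exactly (unsigned) x >> 31.  Flipping
   the signedness of the shift therefore absorbs the negation.  */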
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
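
/* For example, splitting A - 2 with CODE == PLUS_EXPR and NEGATE_P
   false yields the variable part A with *MINUS_LITP set to 2 (the
   subtracted literal is recorded rather than rebuilt as a negation),
   while *CONP and *LITP are left null.  */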
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);

  t = build_int_2 (low, hi);
  TREE_TYPE (t) = TREE_TYPE (arg1);

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
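
/* The complex cases follow the textbook identities
   (a + bi) * (c + di) = (ac - bd) + (ad + bc)i and
   (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   with each component computed by recursive const_binop calls and,
   for division, MAGSQUARED holding the real denominator c*c + d*d.  */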
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, another INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
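
/* NEW_CONST doubles as the scratch lookup key for this hash-consing
   scheme: the node is filled in with the requested value first, and
   only if no equal constant is already in SIZE_HTAB does it get
   inserted, with a fresh node allocated to serve as the key for the
   next call.  A cache hit therefore costs no allocation at all.  */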
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
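
/* Concretely, under these Java-inspired saturation rules, converting
   the double 1e30 to a 32-bit signed int folds to INT_MAX, -1e30
   folds to INT_MIN, and a NaN folds to 0, each with the overflow bit
   set on the resulting constant.  */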
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

static tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.

   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence a zero value for
   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
   If comparing arbitrary expression trees, such as from different
   statements, ONLY_CONST must usually be nonzero.  */

int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  tree fndecl;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
          && (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1))))
        return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));
        default:
          return 0;
        }

    case 'e':
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case RTL_EXPR:
          return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg1, 0), 0))
            return 0;

          /* Only consider const functions equivalent.  */
          fndecl = get_callee_fndecl (arg0);
          if (fndecl == NULL_TREE
              || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
            return 0;

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return TREE_CODE (arg0) == FUNCTION_DECL
             && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
             && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
             && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);

    default:
      return 0;
    }
}
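/* Illustrative example (not from the original source): for IEEE
   floating point, operand_equal_p deliberately differs from C's ==
   operator.  With REAL_CST operands:

       operand_equal_p (-0.0, 0.0, 1)  -> 0   even though -0.0 == 0.0
       operand_equal_p (NaN, NaN, 1)   -> 1   even though NaN != NaN

   because REAL_VALUES_IDENTICAL compares representations, not
   values.  */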
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
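/* Illustrative example (not from the original source): given
   ARG = (a < b) && (b < c), the call eval_subst (arg, a, x, b, y)
   rebuilds the tree as (x < y) && (y < c) -- every comparison operand
   equal to OLD0 or OLD1 is replaced, and each rebuilt node is passed
   back through fold.  */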
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
          && !flag_unsafe_math_optimizations
          && code != NE_EXPR
          && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else if (code == UNORDERED_EXPR
               || code == ORDERED_EXPR
               || code == UNEQ_EXPR
               || code == UNLT_EXPR
               || code == UNLE_EXPR
               || code == UNGT_EXPR
               || code == UNGE_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        return build (invert_tree_comparison (code), type,
                      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build (TRUTH_XOR_EXPR, type,
                      invert_truthvalue (TREE_OPERAND (arg, 0)),
                      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)),
                    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build (EQ_EXPR, type, arg,
                    fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
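/* Illustrative example (not from the original source): for integral
   operands, invert_truthvalue (a < b && c != 0) produces
   (a >= b || c == 0) by De Morgan's laws.  A floating-point a < b
   (without -funsafe-math-optimizations) is instead left intact and
   wrapped as !(a < b), since a >= b is not its inverse when either
   operand is a NaN.  */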
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
                      fold (build (code, type, left, right))));
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result = build (BIT_FIELD_REF, type, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (linner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask),
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (rinner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return fold_convert (compare_type,
                               (code == NE_EXPR
                                ? integer_one_node : integer_zero_node));
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return fold_convert (compare_type,
                               (code == NE_EXPR
                                ? integer_one_node : integer_zero_node));
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build (code, compare_type,
                build (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
}
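/* Illustrative example (not from the original source): given

       struct s { unsigned int f : 3; } x;

   the test "x.f == 5" would normally extract the field with shifts.
   Under the scheme above it can instead be compiled as, roughly,

       (word_containing_f & MASK) == (5 << BITPOS_OF_F)

   where MASK covers the three bits of F, avoiding the extraction
   shifts entirely.  */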
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TREE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
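/* Illustrative example (not from the original source): for a 32-bit
   EXP, sign_bit_p returns EXP when VAL is 0x80000000 -- a one in bit 31
   and zeros elsewhere.  When EXP is (int) (signed char) c, the
   recursion on the NOP_EXPR also accepts 0x80, the sign bit of the
   unextended 8-bit type.  */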
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
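#if 0
/* Illustrative sketch (not part of the original source): the range
   transformation above relies on unsigned wraparound.  Assuming a
   32-bit int, the two functions below compute the same result, but the
   second needs only one comparison: values of X below 2 wrap around to
   very large unsigned numbers, so the single unsigned comparison
   covers both bounds at once.  */

static int
in_range (int x)
{
  return x >= 2 && x <= 5;
}

static int
in_range_folded (int x)
{
  return (unsigned) (x - 2) <= 3;
}
#endif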
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return fold_convert (type, result ? integer_one_node : integer_zero_node);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (first_rtl_op (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == '<'
              || TREE_CODE_CLASS (code) == '1'
              || TREE_CODE_CLASS (code) == '2')
            type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == '2'
              || TREE_CODE_CLASS (code) == '<'
              || (TREE_CODE_CLASS (code) == 'e'
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
         lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
        orig_type = type;

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              abort ();
            }

          exp = arg0;

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  */
          if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
                                  1, fold_convert (type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (type, integer_zero_node);
                }
            }
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, type,
                               fold_convert (type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, type,
                                fold_convert (type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build (MINUS_EXPR, type, negate_expr (arg0),
                       fold_convert (type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
            break;

          if (! INTEGRAL_TYPE_P (type)
              || (low != 0 && ! int_fits_type_p (low, type))
              || (high != 0 && ! int_fits_type_p (high, type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (type, n_low);

          if (n_high != 0)
            n_high = fold_convert (type, n_high);

          /* If we're converting from an unsigned to a signed type,
             we will be doing the comparison as unsigned.  The tests above
             have already verified that LOW and HIGH are both positive.

             So we have to make sure that the original unsigned value will
             be interpreted as positive.  */
          if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
            {
              tree equiv_type = lang_hooks.types.type_for_mode
                (TYPE_MODE (type), 1);
              tree high_positive;

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (type);

              if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
                high_positive = fold (build (RSHIFT_EXPR, type,
                                             fold_convert (type,
                                                           high_positive),
                                             fold_convert (type,
                                                           integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = TREE_OPERAND (exp, 0);
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
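/* Illustrative example (not from the original source): for
   EXP = (x + 2 > 7), make_range starts from "EXP != 0", sees the
   GT_EXPR and records "- [-, 7]" for x + 2, then the PLUS_EXPR case
   shifts the remaining bound by the constant, yielding "- [-, 5]" for
   x alone: IN_P = 0 with the low bound omitted and high bound 5.  */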
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TREE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node)));
        }
    }

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
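/* Illustrative example (not from the original source): asking
   build_range_check for "x in [2, 5]" falls through to the final
   recursion, which rewrites the test as "x - 2 in [0, 3]"; that call
   then takes the integer_zerop (low) path, switches to the unsigned
   type, and produces (unsigned) (x - 2) <= 3.  */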
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            return 0;
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
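/* Illustrative example (not from the original source): for
   ch >= '0' && ch <= '9' the two "in" ranges + [48, -] and + [-, 57]
   intersect to + [48, 57].  The adjacency case handles things like
   - [2, 5] and - [6, 9], which merge to - [2, 9]; fold_range_test
   exploits this with inverted ranges to combine ORed tests.  */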
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
               || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
               || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                      TREE_OPERAND (exp, 1));

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                          TREE_TYPE (exp), lhs, rhs);
        }
    }

  return 0;
}
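/* Illustrative example (not from the original source): for
   x == 2 || x == 3 the inverted single-point ranges - [2, 2] and
   - [3, 3] are adjacent, so merge_ranges yields - [2, 3]; inverting
   back gives + [2, 3], and build_range_check emits
   (unsigned) (x - 2) <= 1.  For x == 2 || x == 5 the ranges are not
   adjacent, so no range test results.  */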
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
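/* Illustrative example (not from the original source): with P = 4 in
   an 8-bit mode, C = 0x0b (binary 1011, field sign bit set) gives
   TEMP = 0xf0 after the three shifts, so the result C ^ TEMP = 0xfb is
   exactly C sign-extended from 4 to 8 bits.  When the sign bit of the
   field is clear, TEMP is 0 and C is returned unchanged.  */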
3884 /* Find ways of folding logical expressions of LHS and RHS:
3885 Try to merge two comparisons to the same innermost item.
3886 Look for range tests like "ch >= '0' && ch <= '9'".
3887 Look for combinations of simple terms on machines with expensive branches
3888 and evaluate the RHS unconditionally.
3890 For example, if we have p->a == 2 && p->b == 4 and we can make an
3891 object large enough to span both A and B, we can do this with a comparison
3892 against the object ANDed with the a mask.
3894 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3895 operations to do this with one comparison.
3897 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3898 function and the one above.
3900 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3901 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3903 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3906 We return the simplified tree or 0 if no optimization is possible. */
3909 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
3911 /* If this is the "or" of two comparisons, we can do something if
3912 the comparisons are NE_EXPR. If this is the "and", we can do something
3913 if the comparisons are EQ_EXPR. I.e.,
3914 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3916 WANTED_CODE is this operation code. For single bit fields, we can
3917 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3918 comparison for one-bit fields. */
3920 enum tree_code wanted_code
;
3921 enum tree_code lcode
, rcode
;
3922 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
3923 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
3924 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
3925 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
3926 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
3927 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
3928 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
3929 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
3930 enum machine_mode lnmode
, rnmode
;
3931 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
3932 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
3933 tree l_const
, r_const
;
3934 tree lntype
, rntype
, result
;
3935 int first_bit
, end_bit
;
3938 /* Start by getting the comparison codes. Fail if anything is volatile.
3939 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3940 it were surrounded with a NE_EXPR. */
3942 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
3945 lcode
= TREE_CODE (lhs
);
3946 rcode
= TREE_CODE (rhs
);
3948 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
3949 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
3951 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
3952 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
3954 if (TREE_CODE_CLASS (lcode
) != '<' || TREE_CODE_CLASS (rcode
) != '<')
3957 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
3958 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
3960 ll_arg
= TREE_OPERAND (lhs
, 0);
3961 lr_arg
= TREE_OPERAND (lhs
, 1);
3962 rl_arg
= TREE_OPERAND (rhs
, 0);
3963 rr_arg
= TREE_OPERAND (rhs
, 1);
3965 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3966 if (simple_operand_p (ll_arg
)
3967 && simple_operand_p (lr_arg
)
3968 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg
)))
3972 if (operand_equal_p (ll_arg
, rl_arg
, 0)
3973 && operand_equal_p (lr_arg
, rr_arg
, 0))
3975 int lcompcode
, rcompcode
;
3977 lcompcode
= comparison_to_compcode (lcode
);
3978 rcompcode
= comparison_to_compcode (rcode
);
3979 compcode
= (code
== TRUTH_AND_EXPR
)
3980 ? lcompcode
& rcompcode
3981 : lcompcode
| rcompcode
;
3983 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
3984 && operand_equal_p (lr_arg
, rl_arg
, 0))
3986 int lcompcode
, rcompcode
;
3988 rcode
= swap_tree_comparison (rcode
);
3989 lcompcode
= comparison_to_compcode (lcode
);
3990 rcompcode
= comparison_to_compcode (rcode
);
3991 compcode
= (code
== TRUTH_AND_EXPR
)
3992 ? lcompcode
& rcompcode
3993 : lcompcode
| rcompcode
;
3998 if (compcode
== COMPCODE_TRUE
)
3999 return fold_convert (truth_type
, integer_one_node
);
4000 else if (compcode
== COMPCODE_FALSE
)
4001 return fold_convert (truth_type
, integer_zero_node
);
4002 else if (compcode
!= -1)
4003 return build (compcode_to_comparison (compcode
),
4004 truth_type
, ll_arg
, lr_arg
);
4007 /* If the RHS can be evaluated unconditionally and its operands are
4008 simple, it wins to evaluate the RHS unconditionally on machines
4009 with expensive branches. In this case, this isn't a comparison
4010 that can be merged. Avoid doing this if the RHS is a floating-point
4011 comparison since those can trap. */
4013 if (BRANCH_COST
>= 2
4014 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
4015 && simple_operand_p (rl_arg
)
4016 && simple_operand_p (rr_arg
))
4018 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4019 if (code
== TRUTH_OR_EXPR
4020 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
4021 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
4022 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4023 return build (NE_EXPR
, truth_type
,
4024 build (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4028 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4029 if (code
== TRUTH_AND_EXPR
4030 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
4031 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
4032 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
4033 return build (EQ_EXPR
, truth_type
,
4034 build (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
4038 return build (code
, truth_type
, lhs
, rhs
);
4041 /* See if the comparisons can be merged. Then get all the parameters for
4044 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
4045 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
4049 ll_inner
= decode_field_reference (ll_arg
,
4050 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
4051 &ll_unsignedp
, &volatilep
, &ll_mask
,
4053 lr_inner
= decode_field_reference (lr_arg
,
4054 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
4055 &lr_unsignedp
, &volatilep
, &lr_mask
,
4057 rl_inner
= decode_field_reference (rl_arg
,
4058 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
4059 &rl_unsignedp
, &volatilep
, &rl_mask
,
4061 rr_inner
= decode_field_reference (rr_arg
,
4062 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
4063 &rr_unsignedp
, &volatilep
, &rr_mask
,
4066 /* It must be true that the inner operation on the lhs of each
4067 comparison must be the same if we are to be able to do anything.
4068 Then see if we have constants. If not, the same must be true for
4070 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
4071 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
4074 if (TREE_CODE (lr_arg
) == INTEGER_CST
4075 && TREE_CODE (rr_arg
) == INTEGER_CST
)
4076 l_const
= lr_arg
, r_const
= rr_arg
;
4077 else if (lr_inner
== 0 || rr_inner
== 0
4078 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
4081 l_const
= r_const
= 0;
4083 /* If either comparison code is not correct for our logical operation,
4084 fail. However, we can convert a one-bit comparison against zero into
4085 the opposite comparison against that bit being set in the field. */
4087 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
4088 if (lcode
!= wanted_code
)
4090 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
4092 /* Make the left operand unsigned, since we are only interested
4093 in the value of one bit. Otherwise we are doing the wrong
4102 /* This is analogous to the code for l_const above. */
4103 if (rcode
!= wanted_code
)
4105 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! lang_hooks.can_use_bit_fields_p ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);
  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return fold_convert (truth_type,
			       wanted_code == NE_EXPR
			       ? integer_one_node : integer_zero_node);
	}
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return fold_convert (truth_type,
			       wanted_code == NE_EXPR
			       ? integer_one_node : integer_zero_node);
	}
    }
  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);
      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}
      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert (rntype, lhs);
		  ll_mask = fold_convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert (lntype, rhs);
		  lr_mask = fold_convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }
  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return fold_convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return fold_convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, lntype, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
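/* As a concrete illustration of the routine above: for two adjacent
   bit-fields `s.a' and `s.b', the test `s.a == 1 && s.b == 2' can be
   merged into a single load of the word containing both fields, one
   BIT_AND_EXPR with the combined mask, and one comparison against the
   OR of the two shifted constants.  */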
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;
  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));
    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      abort ();
    }
}
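/* For example, `MAX (X, 3) >= 5' has no case of its own: the GE_EXPR
   arm above splits it into `MAX (X, 3) == 5 || MAX (X, 3) > 5', and
   because 3 < 5 the recursive calls reduce that to `X == 5 || X > 5'.  */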
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TREE_UNSIGNED (ctype)
		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;
    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2)));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold (build (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}
      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype,
			    fold (build (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c))),
			    op1));

      break;
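      /* This distributive-law path is what canonicalizes (X + 7) * 4
	 as promised in the header comment: tcode is PLUS_EXPR, code is
	 MULT_EXPR, and OP1 (7) has already been folded with C (4) into
	 28 above, so the result is built as X * 4 + 28.  */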
    case TRUNC_MOD_EXPR:  case CEIL_MOD_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */
    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation in CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
   S, a SAVE_EXPR, return the expression actually being evaluated.   Note
   that we may sometimes modify the tree.  */

static tree
strip_compound_expr (tree t, tree s)
{
  enum tree_code code = TREE_CODE (t);

  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
    return TREE_OPERAND (t, 1);

  /* See if this is a COND_EXPR or a simple arithmetic operator.  We
     don't bother handling any other types.  */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
  else if (TREE_CODE_CLASS (code) == '1')
    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
	   || TREE_CODE_CLASS (code) == '2')
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

static tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (value ? integer_one_node
					     : integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Utility function for the following routine, to see how complex a nesting of
   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */

static int
count_cond (tree expr, int lim)
{
  int ctrue, cfalse;

  if (TREE_CODE (expr) != COND_EXPR)
    return 0;
  else if (lim <= 0)
    return 0;

  ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
  cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
  return MIN (lim, 1 + ctrue + cfalse);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
				     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  int save = 0;

  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }
  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = fold_convert (testtype, integer_one_node);
      false_value = fold_convert (testtype, integer_zero_node);
    }
  /* If ARG is complex we want to make sure we only evaluate it once.  Though
     this is only required if it is volatile, it might be more efficient even
     if it is not.  However, if we succeed in folding one part to a constant,
     we do not need to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this SAVE_EXPR
     interferes with later optimizations, suppressing it when we can is
     important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
     do so.  Don't try to see if the result is a constant if an arm is a
     COND_EXPR since we get exponential behavior in that case.  */

  if (saved_expr_p (arg))
    save = 1;
  else if (lhs == 0 && rhs == 0
	   && !TREE_CONSTANT (arg)
	   && lang_hooks.decls.global_bindings_p () == 0
	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
	       || TREE_SIDE_EFFECTS (arg)))
    {
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = saved_expr_p (arg);
	}
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  if (save)
    /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
       ahead of the COND_EXPR we made.  Otherwise we would have it only
       evaluated in one branch, with the other branch using the result
       but missing the evaluation code.  Beware that the save_expr call
       above might not return a SAVE_EXPR, so testing the TREE_CODE
       of ARG is not enough to decide here.  */
    return build (COMPOUND_EXPR, type,
		  fold_convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return fold_convert (type, test);
}
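/* For example, folding `f () + (b ? x : y)' places the `f ()' operand
   in both arms of the resulting COND_EXPR; the SAVE_EXPR handling
   above ensures f is still called exactly once, and the COMPOUND_EXPR
   forces that call to happen before the branch is evaluated.  */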
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
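/* Concretely: `x - 0.0' folds to `x' only when sign-dependent rounding
   is not honored, since when rounding towards -infinity 0.0 - 0.0
   yields -0.0; `x + 0.0' never folds here once signed zeros matter,
   because -0.0 + 0.0 is 0.0, not -0.0.  */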
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type,
				     fold_convert (type, integer_zero_node),
				     arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type,
				     fold_convert (type, integer_one_node),
				     arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold (build (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0)));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold (build (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type,
				       fold_convert (type, integer_zero_node),
				       arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold (build (code, type, arg,
			      build_real (TREE_TYPE (arg), c2)));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type,
					 fold_convert (type, integer_one_node),
					 arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold (build (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2)));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold (build (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0)));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold (build (code, type, arg,
				build_real (TREE_TYPE (arg), c2)));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold (build (TRUTH_ANDIF_EXPR, type,
				  fold (build (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0))),
				  fold (build (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)))));
	    }
	}
    }

  return NULL_TREE;
}
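/* For instance, `sqrt (x) > 3.0' is folded above to `x > 9.0' (3*3
   rounded in the target format), and `sqrt (x) < -1.0' folds to
   constant false, eliminating the sqrt call entirely.  */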
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type,
			       fold_convert (type, integer_zero_node),
			       arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type,
				 fold_convert (type, integer_one_node),
				 arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold (build (EQ_EXPR, type, arg0, arg0));
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold (build (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max)));
      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
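/* E.g. `x < +Inf' becomes `x <= DBL_MAX', and, when NaNs must be
   honored, `x != +Inf' becomes `!(x > DBL_MAX)' so that a NaN operand
   still yields true.  */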
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE)
	{
	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
	  return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
			      fold_convert (stype, arg00),
			      fold_convert (stype, integer_zero_node)));
	}

      /* At this point, we know that arg0 is not testing the sign bit.  */
      if (TYPE_PRECISION (type) - 1 == bitnum)
	abort ();

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
	inner = build (RSHIFT_EXPR, intermediate_type,
		       inner, size_int (bitnum));

      if (code == EQ_EXPR)
	inner = build (BIT_XOR_EXPR, intermediate_type,
		       inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build (BIT_AND_EXPR, intermediate_type,
		     inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
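/* Example: `(x & 8) != 0' does not test the sign bit, so it is
   rewritten above as `(x >> 3) & 1', carried out in an intermediate
   type whose signedness depends on LOAD_EXTEND_OP; for the inverted
   test `(x & 8) == 0' a BIT_XOR_EXPR with 1 flips the extracted bit.  */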
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
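/* fold uses this predicate to canonicalize commutative operations,
   e.g. rewriting `5 + x' as `x + 5' so that later patterns only need
   to look for a constant in the second operand.  */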
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the C type of the overall expression)
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif

tree
fold (tree expr)
{
  const tree t = expr;
  const tree type = TREE_TYPE (expr);
  tree t1 = NULL_TREE;
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  int kind = TREE_CODE_CLASS (code);
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  /* Don't try to process an RTL_EXPR since its operands aren't trees.
     Likewise for a SAVE_EXPR that's already been evaluated.  */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
    return t;

  /* Return right away if a constant.  */
  if (kind == 'c')
    return t;
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
	STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = first_rtl_op (code);
      int i;
      for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */

	  /* Strip any conversions that don't change the mode.  This is
	     safe for every expression, except for a comparison expression
	     because its signedness is derived from its operands.  So, in
	     the latter case, only strip conversions that don't change the
	     signedness.

	     Note that this is done as an internal manipulation within the
	     constant folder, in order to find the simplest representation
	     of the arguments so that their form can be studied.  In any
	     cases, the appropriate type conversions should be put back in
	     the tree that will get out of the constant folder.  */
	  if (kind == '<')
	    STRIP_SIGN_NOPS (op);
	  else
	    STRIP_NOPS (op);

	  if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;

	  if (TREE_CODE (subop) != INTEGER_CST
	      && TREE_CODE (subop) != REAL_CST)
	    /* Note that TREE_CONSTANT isn't enough:
	       static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build (code, type, TREE_OPERAND (t, 1),
			TREE_OPERAND (t, 0)));

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			 : TRUTH_XOR_EXPR,
			 type, arg0, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue (tem);

      return tem;
    }
) == '1')
5516 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
5517 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
5518 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))));
5519 else if (TREE_CODE (arg0
) == COND_EXPR
)
5521 tree arg01
= TREE_OPERAND (arg0
, 1);
5522 tree arg02
= TREE_OPERAND (arg0
, 2);
5523 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
5524 arg01
= fold (build1 (code
, type
, arg01
));
5525 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
5526 arg02
= fold (build1 (code
, type
, arg02
));
5527 tem
= fold (build (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
5530 /* If this was a conversion, and all we did was to move into
5531 inside the COND_EXPR, bring it back out. But leave it if
5532 it is a conversion from integer to integer and the
5533 result precision is no wider than a word since such a
5534 conversion is cheap and may be optimized away by combine,
5535 while it couldn't if it were outside the COND_EXPR. Then return
5536 so we don't get into an infinite recursion loop taking the
5537 conversion out and then back in. */
5539 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
5540 || code
== NON_LVALUE_EXPR
)
5541 && TREE_CODE (tem
) == COND_EXPR
5542 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
5543 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
5544 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
5545 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
5546 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
5547 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
5548 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
5550 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
5551 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
))
5552 tem
= build1 (code
, type
,
5554 TREE_TYPE (TREE_OPERAND
5555 (TREE_OPERAND (tem
, 1), 0)),
5556 TREE_OPERAND (tem
, 0),
5557 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
5558 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
5561 else if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
5562 return fold (build (COND_EXPR
, type
, arg0
,
5563 fold (build1 (code
, type
, integer_one_node
)),
5564 fold (build1 (code
, type
, integer_zero_node
))));
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == '<'
	   && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == '2'
	   || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
	  && ! TREE_SIDE_EFFECTS (arg0))
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		      fold (build (code, type,
				   arg0, TREE_OPERAND (arg1, 1))));
      else if ((TREE_CODE (arg1) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg0) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg0)
		   || (lang_hooks.decls.global_bindings_p () == 0
		       && ! CONTAINS_PLACEHOLDER_P (arg0))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
					       /*cond_first_p=*/0);
      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
      else if ((TREE_CODE (arg0) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg1) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg1)
		   || (lang_hooks.decls.global_bindings_p () == 0
		       && ! CONTAINS_PLACEHOLDER_P (arg1))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
					       /*cond_first_p=*/1);
    }
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
      if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
	return TREE_OPERAND (t, 0);
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TREE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return fold (build1 (code, type,
				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
	}
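      /* Two worked examples of the rules above: `(int) (long) i', with
	 i an int, collapses to plain `i'; and `(int) (short) uc', with
	 uc an unsigned char, is a sign-extension of a zero-extended
	 value, so it folds to the single zero-extension `(int) uc'.  */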
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  tem = copy_node (t);
	  TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  tem = build (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
	  TREE_NO_UNUSED_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TREE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    return fold (build (BIT_AND_EXPR, type,
				fold_convert (type, and0),
				fold_convert (type, and1)));
	}
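      /* E.g. `(unsigned int) (us & 0x7f)', with us an unsigned short,
	 becomes `(unsigned int) us & 0x7f': the operand type is
	 unsigned, so CHANGE is set and the extension folds into the
	 BIT_AND_EXPR.  */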
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : t;
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;

    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    return TREE_VALUE (m);
	}
      return t;

    case RANGE_EXPR:
      if (TREE_CONSTANT (t) != wins)
	{
	  tem = copy_node (t);
	  TREE_CONSTANT (tem) = wins;
	  return tem;
	}
      return t;
    case NEGATE_EXPR:
      if (negate_expr_p (arg0))
	return fold_convert (type, negate_expr (arg0));
      return t;

    case ABS_EXPR:
      if (wins
	  && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold (build1 (ABS_EXPR,
						     TREE_TYPE (targ0),
						     targ0)));
	}
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;
      return t;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build (COMPLEX_EXPR, type,
		      TREE_OPERAND (arg0, 0),
		      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (CONJ_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (CONJ_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case BIT_NOT_EXPR:
      if (wins)
	{
	  tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
			     ~ TREE_INT_CST_HIGH (arg0));
	  TREE_TYPE (tem) = type;
	  force_fit_type (tem, 0);
	  TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
	  return tem;
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      else if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce
	     more simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
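	  /* Added illustration (commentary, not from the original source):
	     assuming 32-bit ints,

		 (x & 0xff00) + (x & 0x00ff)

	     has disjoint mask constants, so it is re-folded as

		 (x & 0xff00) | (x & 0x00ff).  */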
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || (TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;

	      if (TREE_CODE (arg0) == PLUS_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg))),
				    fold_convert (type, parg1)));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type,
						 fold_convert (type, parg1),
						 fold_convert (type, marg))),
				    fold_convert (type, parg0)));
	    }
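	  /* Added illustration (commentary, not from the original source):
	     the reassociation above turns (a*4 + b) + c*4 into
	     (a*4 + c*4) + b, which exposes the (A * C) + (B * C)
	     factoring handled just below.  */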
	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build (MULT_EXPR, type, arg00,
					  build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type, alt0, alt1)),
				    same));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold (build (MULT_EXPR, type, arg0,
				build_real (type, dconst2)));

	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build (MULT_EXPR, type, arg1,
				  build_real (type, c)));
	    }
	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build (MULT_EXPR, type, arg0,
				  build_real (type, c)));
	    }

	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold (build (MULT_EXPR, type,
				  TREE_OPERAND (arg0, 0),
				  build_real (type, c1)));
	    }
	}
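      /* Added illustration (commentary, not from the original source):
	 under -funsafe-math-optimizations, x*2.0 + x*3.0 folds to x*5.0
	 by adding the two REAL_CST coefficients with real_arithmetic.  */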
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							   code, type));
	    }
	}

      t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != type)
	    t1 = fold_convert (type, t1);

	  return t1;
	}
      return t;
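      /* Added illustration of the association code above (commentary, not
	 from the original source): split_tree decomposes (x + 1) + (y + 2)
	 into variables x, y and literals 1, 2; associate_trees then
	 rebuilds it as (x + y) + 3, combining the two literals even though
	 they were not adjacent.  */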
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 0))),
				    arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 1))),
				    arg0));
	    }
	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1));
		  return fold (build (MINUS_EXPR, type, tem, mask1));
		}
	    }
	}
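      /* Added illustration (commentary, not from the original source):
	 with the mask 7 (a power of 2 minus 1), (a & ~7) - (a & 7)
	 becomes (a ^ 7) - 7, saving one AND.  */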
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
	return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
      if (TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (arg1) == MULT_EXPR
	  && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  /* (A * C) - (B * C) -> (A-B) * C.  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
			       TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));
	  /* (A * C1) - (A * C2) -> A * (C1-C2).  */
	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0))
	    return fold (build (MULT_EXPR, type,
				TREE_OPERAND (arg0, 0),
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1)))));
	}

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (MULT_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (MULT_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
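	  /* Added illustration (commentary, not from the original source):
	     since 1 << b is the multiplier, a * (1 << b) becomes a << b;
	     a constant multiplier such as a * 8 is instead handled by the
	     extract_muldiv call above.  */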
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold (build1 (NEGATE_EXPR, type, arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1)));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of sqrt(...)*sqrt(...).  */
	      if (fcode0 == fcode1 && BUILTIN_SQRT_P (fcode0))
		{
		  tree sqrtfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
		  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (sqrtfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build (PLUS_EXPR, type,
				    TREE_VALUE (TREE_OPERAND (arg0, 1)),
				    TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn;

		  if (fcode0 == BUILT_IN_TAN || fcode0 == BUILT_IN_COS)
		    sinfn = implicit_built_in_decls[BUILT_IN_SIN];
		  else if (fcode0 == BUILT_IN_TANF || fcode0 == BUILT_IN_COSF)
		    sinfn = implicit_built_in_decls[BUILT_IN_SINF];
		  else
		    sinfn = implicit_built_in_decls[BUILT_IN_SINL];

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = NULL_TREE;

		  if (type == double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POW];
		  else if (type == float_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWF];
		  else if (type == long_double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWL];

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
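      /* Added illustration (commentary, not from the original source):
	 under -funsafe-math-optimizations, pow (x, 2.0) * pow (x, 3.0)
	 folds to pow (x, 5.0), and sqrt (x) * sqrt (y) to sqrt (x*y);
	 both rely on the argument lists extracted above.  */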
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, integer_zero_node, arg0);

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce
	 more simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;
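      /* Added illustration (commentary, not from the original source):
	 the De Morgan rewrite above turns ~a & ~b into ~(a | b), one NOT
	 instead of two, and lets the backend use a NOR when it has one.  */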
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold (build (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build (MULT_EXPR, type,
			    fold (build (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0))),
			    TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0)));
	}
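      /* Added illustration (commentary, not from the original source):
	 x / 4.0 becomes x * 0.25 because 0.25 is exactly representable;
	 x / 3.0 is rewritten only under -funsafe-math-optimizations,
	 since 1.0/3.0 rounds.  */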
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = build1 (NEGATE_EXPR, type,
				 TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE, fold (arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_SIN)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_SINF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_SINL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_COS)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_COSF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_COSL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build (RDIV_EXPR, type,
				      build_real (type, dconst1),
				      tmp));
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
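      /* Added illustration (commentary, not from the original source):
	 under -funsafe-math-optimizations, pow (x, 3.0) / x folds to
	 pow (x, 2.0) by subtracting dconst1 from the exponent.  */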
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold (build (RROTATE_EXPR, type, arg0, tem));
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);
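      /* Added illustration (commentary, not from the original source):
	 on a 32-bit type, rotating right by 12 and then by 20 restores
	 the original value, so the pair of RROTATE_EXPRs collapses to
	 the underlying operand.  */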
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	{
	  tem = fold_single_bit_test (code, arg0, arg1, type);
	  if (!tem)
	    return t;
	}
      return fold_convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a01,
				fold (build (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build (TREE_CODE (arg0), type,
				fold (build (code, type, a00, a10)),
				a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      return t;
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold (build (swap_tree_comparison (code), type, arg1, arg0));

      /* If this is an equality comparison of the address of a non-weak
	 object against zero, then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && integer_zerop (arg1))
	{
	  if (code == EQ_EXPR)
	    return integer_zero_node;
	  else
	    return integer_one_node;
	}

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  if (code == EQ_EXPR)
	    return (operand_equal_p (arg0, arg1, 0)
		    ? integer_one_node : integer_zero_node);
	  else
	    return (operand_equal_p (arg0, arg1, 0)
		    ? integer_zero_node : integer_one_node);
	}
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold (build (code, type, fold_convert (newtype, targ0),
				fold_convert (newtype, targ1)));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0)));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold (build (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst))));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold (build (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0)));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  tem = (code == NE_EXPR
			 ? integer_one_node : integer_zero_node);
		  return omit_one_operand (type,
					   fold_convert (type, tem), arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem));

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
				       arg1, TREE_OPERAND (arg0, 1)));
	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			      TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
	    }

	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
	      tree folded_compare;
	      tree mask = 0;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold (build2 (code, type,
					     TREE_OPERAND (varop, 0),
					     arg1));
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      if (size < HOST_BITS_PER_WIDE_INT)
		{
		  unsigned HOST_WIDE_INT lo = ((unsigned HOST_WIDE_INT) 1
					       << size) - 1;
		  mask = build_int_2 (lo, 0);
		}
	      else if (size < 2 * HOST_BITS_PER_WIDE_INT)
		{
		  HOST_WIDE_INT hi = ((HOST_WIDE_INT) 1
				      << (size - HOST_BITS_PER_WIDE_INT)) - 1;
		  mask = build_int_2 (~0, hi);
		}

	      if (mask)
		{
		  mask = fold_convert (TREE_TYPE (varop), mask);
		  newconst = fold (build2 (BIT_AND_EXPR, TREE_TYPE (varop),
					   newconst, mask));
		}
	    }

	  return fold (build2 (code, type, varop, newconst));
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build (GT_EXPR, type, arg0, arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build (LE_EXPR, type, arg0, arg1));

	    default:
	      break;
	    }
	}
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    unsigned HOST_WIDE_INT signed_max;
	    unsigned HOST_WIDE_INT max, min;

	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
	      {
		max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		min = 0;
	      }
	    else
	      {
		max = signed_max;
		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	      }

	    if (TREE_INT_CST_HIGH (arg1) == 0
		&& TREE_INT_CST_LOW (arg1) == max)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_zero_node),
					   arg0);
		case GE_EXPR:
		  return fold (build (EQ_EXPR, type, arg0, arg1));

		case LE_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_one_node),
					   arg0);
		case LT_EXPR:
		  return fold (build (NE_EXPR, type, arg0, arg1));

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == max - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (EQ_EXPR, type, arg0, arg1));
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (NE_EXPR, type, arg0, arg1));
		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_zero_node),
					   arg0);
		case LE_EXPR:
		  return fold (build (EQ_EXPR, type, arg0, arg1));

		case GE_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_one_node),
					   arg0);
		case GT_EXPR:
		  return fold (build (NE_EXPR, type, arg0, arg1));

		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (NE_EXPR, type, arg0, arg1));
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (EQ_EXPR, type, arg0, arg1));
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == signed_max
		     && TREE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
		    st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
		    return fold
		      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			      type, fold_convert (st0, arg0),
			      fold_convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));

      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR
	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
		   || (TREE_CODE (t1) == INTEGER_CST
		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
	return fold (build (code, type, tem,
			    fold_convert (TREE_TYPE (tem), t1)));
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (TRUTH_ANDIF_EXPR, type,
			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
			    build (LE_EXPR, type,
				   TREE_OPERAND (arg0, 0), arg1)));
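      /* Added illustration (commentary, not from the original source):
	 ABS_EXPR (x) <= 5 becomes x >= -5 && x <= 5, which the range
	 code can then turn into a single unsigned comparison.  */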
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (TREE_OPERAND (arg0, 0),
						       1)),
				  fold_convert (TREE_TYPE (arg0),
						integer_one_node)),
			   arg1));
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 0),
					 TREE_OPERAND (TREE_OPERAND (arg0, 1),
						       1)),
				  fold_convert (TREE_TYPE (arg0),
						integer_one_node)),
			   arg1));
	}

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && ! TREE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
          tree newmod = build (TREE_CODE (arg0), newtype,
                               fold_convert (newtype,
                                             TREE_OPERAND (arg0, 0)),
                               fold_convert (newtype,
                                             TREE_OPERAND (arg0, 1)));

          return build (code, type, newmod, fold_convert (newtype, arg1));
        }
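
      /* For example, with signed X, `x % 4 == 0' is rewritten as
         `(unsigned) x % 4U == 0', which can then fold further into the
         mask test `(x & 3) == 0'.  */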

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, integer_zero_node));
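
      /* For example, `(a & 8) == 8' becomes `(a & 8) != 0', the form
         the single-bit-test folding below knows how to lower into
         shifts.  */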

      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
         2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
        return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree dandnotc
            = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
                           arg1, build1 (BIT_NOT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 1))));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree candnotd
            = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
                           TREE_OPERAND (arg0, 1),
                           build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand (type, rslt, arg0);
        }

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TREE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                             TREE_OPERAND (arg1, 1)),
                      fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
               && TREE_UNSIGNED (TREE_TYPE (arg0))
               && (TREE_CODE (arg1) == NOP_EXPR
                   || TREE_CODE (arg1) == CONVERT_EXPR)
               && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
               && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
          build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                 fold_convert (TREE_TYPE (arg0),
                               build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                      TREE_OPERAND (TREE_OPERAND (arg1, 0),
                                                    1))),
                 fold_convert (TREE_TYPE (arg0), integer_zero_node));
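
      /* For example, for unsigned X, `x < (1 << y)' becomes
         `(x >> y) == 0' and `x >= (1 << y)' becomes `(x >> y) != 0',
         replacing an ordered comparison by a cheaper test against
         zero.  */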

      /* Simplify comparison of something with itself.  (For IEEE
         floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
        {
          switch (code)
            {
            case EQ_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              break;

            case GE_EXPR:
            case LE_EXPR:
              if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
                  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                return constant_boolean_node (1, type);
              return fold (build (EQ_EXPR, type, arg0, arg1));

            case NE_EXPR:
              /* For NE, we can only do this simplification if integer
                 or we don't honor IEEE floating point NaNs.  */
              if (FLOAT_TYPE_P (TREE_TYPE (arg0))
                  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
                break;
              /* ... fall through ...  */
            case GT_EXPR:
            case LT_EXPR:
              return constant_boolean_node (0, type);
            default:
              abort ();
            }
        }

      /* If we are comparing an expression that just has comparisons
         of two integer values, arithmetic expressions of those comparisons,
         and constants, we can simplify it.  There are only three cases
         to check: the two values can either be equal, the first can be
         greater, or the second can be greater.  Fold the expression for
         those three values.  Since each value must be 0 or 1, we have
         eight possibilities, each of which corresponds to the constant 0
         or 1 or one of the six possible comparisons.

         This handles common cases like (a > b) == 0 but also handles
         expressions like ((x > y) - (y > x)) > 0, which supposedly
         occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
        {
          tree cval1 = 0, cval2 = 0;
          int save_p = 0;

          if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
              /* Don't handle degenerate cases here; they should already
                 have been handled anyway.  */
              && cval1 != 0 && cval2 != 0
              && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
              && TREE_TYPE (cval1) == TREE_TYPE (cval2)
              && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval1))
              && TYPE_MAX_VALUE (TREE_TYPE (cval2))
              && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
            {
              tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
              tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

              /* We can't just pass T to eval_subst in case cval1 or cval2
                 was the same as ARG1.  */

              tree high_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, minval),
                               arg1));
              tree equal_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, maxval, cval2, maxval),
                               arg1));
              tree low_result
                = fold (build (code, type,
                               eval_subst (arg0, cval1, minval, cval2, maxval),
                               arg1));

              /* All three of these results should be 0 or 1.  Confirm they
                 are.  Then use those values to select the proper code
                 to use.  */

              if ((integer_zerop (high_result)
                   || integer_onep (high_result))
                  && (integer_zerop (equal_result)
                      || integer_onep (equal_result))
                  && (integer_zerop (low_result)
                      || integer_onep (low_result)))
                {
                  /* Make a 3-bit mask with the high-order bit being the
                     value for `>', the next for '=', and the low for '<'.  */
                  switch ((integer_onep (high_result) * 4)
                          + (integer_onep (equal_result) * 2)
                          + integer_onep (low_result))
                    {
                    case 0:
                      /* Always false.  */
                      return omit_one_operand (type, integer_zero_node, arg0);
                    case 1:
                      code = LT_EXPR;
                      break;
                    case 2:
                      code = EQ_EXPR;
                      break;
                    case 3:
                      code = LE_EXPR;
                      break;
                    case 4:
                      code = GT_EXPR;
                      break;
                    case 5:
                      code = NE_EXPR;
                      break;
                    case 6:
                      code = GE_EXPR;
                      break;
                    case 7:
                      /* Always true.  */
                      return omit_one_operand (type, integer_one_node, arg0);
                    }

                  tem = build (code, type, cval1, cval2);
                  if (save_p)
                    return save_expr (tem);
                  else
                    return fold (tem);
                }
            }
        }

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
            && lang_hooks.can_use_bit_fields_p ())
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          && (code == EQ_EXPR || code == NE_EXPR)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (code, type, arg0, arg1);
          if (t1)
            return t1;
        }

      /* If this is a comparison of complex values and either or both sides
         are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
         comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
         This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
          && (TREE_CODE (arg0) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg0) == COMPLEX_CST
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree subtype = TREE_TYPE (TREE_TYPE (arg0));
          tree real0, imag0, real1, imag1;

          arg0 = save_expr (arg0);
          arg1 = save_expr (arg1);
          real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
          imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
          real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
          imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

          return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
                               : TRUTH_ORIF_EXPR),
                              type,
                              fold (build (code, type, real0, real1)),
                              fold (build (code, type, imag0, imag1))));
        }

      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && integer_zerop (arg1)
          && TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fndecl = get_callee_fndecl (arg0);
          tree arglist;

          if (fndecl
              && DECL_BUILT_IN (fndecl)
              && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && (arglist = TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
              && ! TREE_CHAIN (arglist))
            return fold (build (code, type,
                                build1 (INDIRECT_REF, char_type_node,
                                        TREE_VALUE (arglist)),
                                integer_zero_node));
        }
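
      /* Thus `if (strlen (s) != 0)' compiles as `if (*s != 0)', with
         no library call at all.  */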

      /* Both ARG0 and ARG1 are known to be constants at this point.  */
      t1 = fold_relational_const (code, type, arg0, arg1);
      return (t1 == NULL_TREE ? t : t1);

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.  */
          if (! VOID_TYPE_P (TREE_TYPE (tem))
              || VOID_TYPE_P (type))
            return pedantic_non_lvalue (tem);
          return t;
        }
      if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
        return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tree arg2 = TREE_OPERAND (t, 2);
          enum tree_code comp_code = TREE_CODE (arg0);

          STRIP_NOPS (arg2);

          /* If we have A op 0 ? A : -A, consider applying the following
             transformations:

             A == 0? A : -A    same as -A
             A != 0? A : -A    same as A
             A >= 0? A : -A    same as abs (A)
             A > 0?  A : -A    same as abs (A)
             A <= 0? A : -A    same as -abs (A)
             A < 0?  A : -A    same as -abs (A)

             None of these transformations work for modes with signed
             zeros.  If A is +/-0, the first two transformations will
             change the sign of the result (from +0 to -0, or vice
             versa).  The last four will fix the sign of the result,
             even though the original expressions could be positive or
             negative, depending on the sign of A.

             Note that all these transformations are correct if A is
             NaN, since the two alternatives (A and -A) are also NaNs.  */
          if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
               ? real_zerop (TREE_OPERAND (arg0, 1))
               : integer_zerop (TREE_OPERAND (arg0, 1)))
              && TREE_CODE (arg2) == NEGATE_EXPR
              && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
            switch (comp_code)
              {
              case EQ_EXPR:
                tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
                tem = fold_convert (type, negate_expr (tem));
                return pedantic_non_lvalue (tem);
              case NE_EXPR:
                return pedantic_non_lvalue (fold_convert (type, arg1));
              case GE_EXPR:
              case GT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = fold_convert (lang_hooks.types.signed_type
                                       (TREE_TYPE (arg1)), arg1);
                arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
                return pedantic_non_lvalue (fold_convert (type, arg1));
              case LE_EXPR:
              case LT_EXPR:
                if (TREE_UNSIGNED (TREE_TYPE (arg1)))
                  arg1 = fold_convert (lang_hooks.types.signed_type
                                       (TREE_TYPE (arg1)), arg1);
                arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
                arg1 = negate_expr (fold_convert (type, arg1));
                return pedantic_non_lvalue (arg1);
              default:
                abort ();
              }

          /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
             A == 0 ? A : 0 is always 0 unless A is -0.  Note that
             both transformations are correct when A is NaN: A != 0
             is then true, and A == 0 is false.  */

          if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
            {
              if (comp_code == NE_EXPR)
                return pedantic_non_lvalue (fold_convert (type, arg1));
              else if (comp_code == EQ_EXPR)
                return pedantic_non_lvalue (fold_convert (type,
                                                          integer_zero_node));
            }

          /* Try some transformations of A op B ? A : B.

             A == B? A : B    same as B
             A != B? A : B    same as A
             A >= B? A : B    same as max (A, B)
             A > B?  A : B    same as max (B, A)
             A <= B? A : B    same as min (A, B)
             A < B?  A : B    same as min (B, A)

             As above, these transformations don't work in the presence
             of signed zeros.  For example, if A and B are zeros of
             opposite sign, the first two transformations will change
             the sign of the result.  In the last four, the original
             expressions give different results for (A=+0, B=-0) and
             (A=-0, B=+0), but the transformed expressions do not.

             The first two transformations are correct if either A or B
             is a NaN.  In the first transformation, the condition will
             be false, and B will indeed be chosen.  In the case of the
             second transformation, the condition A != B will be true,
             and A will be chosen.

             The conversions to max() and min() are not correct if B is
             a number and A is not.  The conditions in the original
             expressions will be false, so all four give B.  The min()
             and max() versions would give a NaN instead.  */
          if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
                                              arg2, TREE_OPERAND (arg0, 0)))
            {
              tree comp_op0 = TREE_OPERAND (arg0, 0);
              tree comp_op1 = TREE_OPERAND (arg0, 1);
              tree comp_type = TREE_TYPE (comp_op0);

              /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
              if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
                {
                  comp_type = type;
                  comp_op0 = arg1;
                  comp_op1 = arg2;
                }

              switch (comp_code)
                {
                case EQ_EXPR:
                  return pedantic_non_lvalue (fold_convert (type, arg2));
                case NE_EXPR:
                  return pedantic_non_lvalue (fold_convert (type, arg1));
                case LE_EXPR:
                case LT_EXPR:
                  /* In C++ a ?: expression can be an lvalue, so put the
                     operand which will be used if they are equal first
                     so that we can convert this back to the
                     corresponding COND_EXPR.  */
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue (fold_convert
                      (type, fold (build (MIN_EXPR, comp_type,
                                          (comp_code == LE_EXPR
                                           ? comp_op0 : comp_op1),
                                          (comp_code == LE_EXPR
                                           ? comp_op1 : comp_op0)))));
                  break;
                case GE_EXPR:
                case GT_EXPR:
                  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
                    return pedantic_non_lvalue (fold_convert
                      (type, fold (build (MAX_EXPR, comp_type,
                                          (comp_code == GE_EXPR
                                           ? comp_op0 : comp_op1),
                                          (comp_code == GE_EXPR
                                           ? comp_op1 : comp_op0)))));
                  break;
                default:
                  abort ();
                }
            }

          /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
             we might still be able to simplify this.  For example,
             if C1 is one less or one more than C2, this might have started
             out as a MIN or MAX and been transformed by this function.
             Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

          if (INTEGRAL_TYPE_P (type)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST)
            switch (comp_code)
              {
              case EQ_EXPR:
                /* We can replace A with C1 in this case.  */
                arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
                return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
                                    TREE_OPERAND (t, 2)));

              case LT_EXPR:
                /* If C1 is C2 + 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case LE_EXPR:
                /* If C1 is C2 - 1, this is min(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MIN_EXPR, type, arg1, arg2)));
                break;

              case GT_EXPR:
                /* If C1 is C2 - 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (MINUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case GE_EXPR:
                /* If C1 is C2 + 1, this is max(A, C2).  */
                if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
                    && operand_equal_p (TREE_OPERAND (arg0, 1),
                                        const_binop (PLUS_EXPR, arg2,
                                                     integer_one_node, 0), 1))
                  return pedantic_non_lvalue
                    (fold (build (MAX_EXPR, type, arg1, arg2)));
                break;

              case NE_EXPR:
                break;

              default:
                abort ();
              }
        }
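
      /* For example, `x < 4 ? x : 3' is recognized here as
         MIN_EXPR (x, 3), since C1 (4) is C2 (3) + 1; similarly
         `x > 5 ? x : 6' becomes MAX_EXPR (x, 6).  */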

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
                                TREE_OPERAND (t, 2), false))
        {
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = invert_truthvalue (arg0);

          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return fold (build (code, type, tem,
                                TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
        }

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
          && integer_zerop (TREE_OPERAND (t, 2))
          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
          && integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue (fold_convert (type,
                                                  invert_truthvalue (arg0)));

      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
         operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, 1))
        return pedantic_non_lvalue (fold_convert (type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
                                                 arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = invert_truthvalue (arg0);
          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
            return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
                                                     tem, arg1)));
        }

      return t;
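
      /* For example, when A and B are truth values, `a ? b : 0'
         becomes `a && b' above, and `a ? b : 1' becomes `!a || b'
         whenever A can be inverted cheaply.  */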

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return t;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);

    case COMPLEX_EXPR:
      if (wins)
        return build_complex (type, arg0, arg1);
      return t;

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (REALPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (REALPART_EXPR,
                                          type, TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand (type, TREE_OPERAND (arg0, 1),
                                 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
        return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 0))),
                            fold (build1 (IMAGPART_EXPR, type,
                                          TREE_OPERAND (arg0, 1)))));
      return t;

      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
         appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
        return TREE_OPERAND (t, 0);

      {
        enum tree_code code0 = TREE_CODE (arg0);
        int kind0 = TREE_CODE_CLASS (code0);
        tree arg00 = TREE_OPERAND (arg0, 0);
        tree arg01;

        if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
          return fold (build1 (code0, type,
                               fold (build1 (CLEANUP_POINT_EXPR,
                                             TREE_TYPE (arg00), arg00))));

        if (kind0 == '<' || kind0 == '2'
            || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
            || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
            || code0 == TRUTH_XOR_EXPR)
          {
            arg01 = TREE_OPERAND (arg0, 1);

            if (TREE_CONSTANT (arg00)
                || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
                    && ! has_cleanups (arg00)))
              return fold (build (code0, type, arg00,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg01), arg01))));

            if (TREE_CONSTANT (arg01))
              return fold (build (code0, type,
                                  fold (build1 (CLEANUP_POINT_EXPR,
                                                TREE_TYPE (arg00), arg00)),
                                  arg01));
          }

        return t;
      }

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
        {
          tree tmp = fold_builtin (t);
          if (tmp)
            return tmp;
        }
      return t;

    default:
      return t;
    } /* switch (code) */
}

#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
           && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case 'x':
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case 'e':
      switch (code)
        {
        case SAVE_EXPR: len = 2; break;
        case GOTO_SUBROUTINE_EXPR: len = 0; break;
        case RTL_EXPR: len = 0; break;
        case WITH_CLEANUP_EXPR: len = 2; break;
        default: break;
        }
      /* Fall through.  */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif

/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}

/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TREE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
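
/* For instance, this predicate can show that `i * 8' is a multiple of 4:
   the MULT_EXPR case succeeds because the INTEGER_CST case finds that
   8 % 4 == 0.  */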

/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their combined width is smaller than that of
         the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TREE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TREE_UNSIGNED (inner_type);
          }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

            CASE_BUILTIN_F (BUILT_IN_ACOS)
            CASE_BUILTIN_F (BUILT_IN_ACOSH)
            CASE_BUILTIN_F (BUILT_IN_CABS)
            CASE_BUILTIN_F (BUILT_IN_COSH)
            CASE_BUILTIN_F (BUILT_IN_ERFC)
            CASE_BUILTIN_F (BUILT_IN_EXP)
            CASE_BUILTIN_F (BUILT_IN_EXP10)
            CASE_BUILTIN_F (BUILT_IN_EXP2)
            CASE_BUILTIN_F (BUILT_IN_FABS)
            CASE_BUILTIN_F (BUILT_IN_FDIM)
            CASE_BUILTIN_F (BUILT_IN_FREXP)
            CASE_BUILTIN_F (BUILT_IN_HYPOT)
            CASE_BUILTIN_F (BUILT_IN_POW10)
            CASE_BUILTIN_F (BUILT_IN_SQRT)
            CASE_BUILTIN_I (BUILT_IN_FFS)
            CASE_BUILTIN_I (BUILT_IN_PARITY)
            CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
              /* Always true.  */
              return 1;

            CASE_BUILTIN_F (BUILT_IN_ASINH)
            CASE_BUILTIN_F (BUILT_IN_ATAN)
            CASE_BUILTIN_F (BUILT_IN_ATANH)
            CASE_BUILTIN_F (BUILT_IN_CBRT)
            CASE_BUILTIN_F (BUILT_IN_CEIL)
            CASE_BUILTIN_F (BUILT_IN_ERF)
            CASE_BUILTIN_F (BUILT_IN_EXPM1)
            CASE_BUILTIN_F (BUILT_IN_FLOOR)
            CASE_BUILTIN_F (BUILT_IN_FMOD)
            CASE_BUILTIN_F (BUILT_IN_LDEXP)
            CASE_BUILTIN_F (BUILT_IN_LLRINT)
            CASE_BUILTIN_F (BUILT_IN_LLROUND)
            CASE_BUILTIN_F (BUILT_IN_LRINT)
            CASE_BUILTIN_F (BUILT_IN_LROUND)
            CASE_BUILTIN_F (BUILT_IN_MODF)
            CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
            CASE_BUILTIN_F (BUILT_IN_POW)
            CASE_BUILTIN_F (BUILT_IN_RINT)
            CASE_BUILTIN_F (BUILT_IN_ROUND)
            CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
            CASE_BUILTIN_F (BUILT_IN_SINH)
            CASE_BUILTIN_F (BUILT_IN_TANH)
            CASE_BUILTIN_F (BUILT_IN_TRUNC)
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_BUILTIN_F (BUILT_IN_FMAX)
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE
                                                 (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_FMIN)
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE
                                                 (TREE_CHAIN (arglist)));

            CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE
                                              (TREE_CHAIN (arglist)));

            default:
              break;
            }
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
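
/* As an illustration of the PLUS_EXPR logic above: for `(int) a + (int) b'
   with A and B unsigned short and a 32-bit int, each operand needs at most
   16 bits, so the sum fits in 17 bits; since 17 < 32 the whole expression
   is known to be non-negative.  */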

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TREE_UNSIGNED (type) && !flag_wrapv)
        return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      return !integer_zerop (t);

    case PLUS_EXPR:
      if (!TREE_UNSIGNED (type) && !flag_wrapv)
        {
          /* With the presence of negative values it is hard
             to say something.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TREE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      /* Weak declarations may link to NULL.  */
      if (DECL_P (TREE_OPERAND (t, 0)))
        return !DECL_WEAK (TREE_OPERAND (t, 0));
      /* Constants and all other cases are never weak.  */
      return true;

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}
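
/* For instance, the ADDR_EXPR case above lets `&v != 0' fold to true for
   any non-weak declaration V, since only weak symbols may end up at a
   null address.  */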

/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
        return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
        int units, i;
        rtx elt;

        units = CONST_VECTOR_NUNITS (r);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (r, i);
            if (!rtl_expr_nonnegative_p (elt))
              return 0;
          }

        return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                 TREE_INT_CST_HIGH (arg0),
                                 &low, &high);
      t = build_int_2 (low, high);
      TREE_TYPE (t) = type;
      TREE_OVERFLOW (t)
        = (TREE_OVERFLOW (arg0)
           | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  if (TREE_CODE (arg0) == INTEGER_CST)
    {
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TREE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = (TREE_OVERFLOW (arg0)
               | force_fit_type (t, overflow));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
        }
    }
  else if (TREE_CODE (arg0) == REAL_CST)
    {
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
    }
#ifdef ENABLE_CHECKING
  else
    abort ();
#endif

  return t;
}

/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
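
  /* For example, `7 >= 9' is handled by rewriting GE as the inverse
     of LT: `7 < 9' evaluates to true, and inverting yields false.  */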

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case that it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
        tem = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (op0))
                            ? INT_CST_LT_UNSIGNED (op0, op1)
                            : INT_CST_LT (op0, op1)),
                           0);
    }

  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
           && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
         exceptions: First, an NE_EXPR is true on NaNs, but that case
         is already handled correctly since we will be inverting the
         result for NE_EXPR.  Second, if we had inverted a LE_EXPR
         or a GE_EXPR into a LT_EXPR, we must return true so that it
         will be inverted into false.  */

      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
          || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
        tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
        tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
                                              TREE_REAL_CST (op1)),
                           0);
      else
        tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
                                             TREE_REAL_CST (op1)),
                           0);
    }

  if (tem == NULL_TREE)
    return NULL_TREE;

  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (tem);
  return tem;
}

#include "gt-fold-const.h"