/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
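
/* For illustration: each relation is the OR of the primitive bits
   LT (1), EQ (2) and GT (4), so COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ.
   ANDing two encodings yields the encoding of the conjunction of the
   comparisons, e.g. (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ, just as
   a <= b && a >= b is equivalent to a == b.  */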
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
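
/* Worked example (illustrative, using 8-bit values for brevity):
   for a = 0x70, b = 0x70 we get sum = 0xE0; ~(a ^ b) has the sign bit
   set (the operands agree in sign) and (a ^ sum) has the sign bit set
   (the sum's sign differs), so the macro is nonzero and signed
   overflow is reported.  If a and b differ in sign to begin with, no
   overflow is possible and the macro yields zero.  */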
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
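
/* Concrete example of the representation (illustrative, assuming
   HOST_BITS_PER_WIDE_INT == 32, so BASE == 0x10000): the doubleword
   with LOW == 0x89abcdef and HI == 0x01234567 is stored as the four
   half-words

       words[0] = 0xcdef, words[1] = 0x89ab,   (from LOW)
       words[2] = 0x4567, words[3] = 0x0123.   (from HI)

   Keeping every element below BASE ensures that the digit products
   formed in mul_double below cannot overflow a HOST_WIDE_INT.  */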
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low, HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
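
/* Example of the effect of force_fit_type (illustrative): a constant
   built as build_int_2 (0x1ff, 0) and given an 8-bit signed type is
   first masked down to 0xff; the sign bit (bit 7) is then found set,
   so all bits outside the precision are set as well, leaving the
   value -1.  The nonzero return value reports that the stored value
   no longer equals the original, i.e. a signed overflow occurred.  */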
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
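
/* Note on the carry computation above: the low halves are added in
   unsigned arithmetic, where wraparound is well defined, and the sum
   L is smaller than the operand L1 exactly when a carry out of the
   low word occurred.  For instance, with 32-bit words,
   0xffffffff + 0x2 yields l == 0x1 < l1, so 1 is carried into H.  */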
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
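
/* Why the final test above detects overflow: prod[] holds the full
   4-digit by 4-digit product, so the upper doubleword (TOPLOW,
   TOPHIGH), after being corrected for the signs of the operands, must
   be the sign extension of the lower doubleword: all zero bits when
   *HV is nonnegative, all one bits when *HV is negative.  Any other
   pattern means the signed product did not fit in one doubleword.  */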
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
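
/* Example of the word-crossing case above (illustrative, assuming
   32-bit HOST_WIDE_INT): shifting l1 = 0x80000001, h1 = 0 left by 1
   gives *hv = 1 (the bit moved out of the low word) and
   *lv = 0x00000002.  The split shift
   `>> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1' avoids shifting by
   the full word size, which C leaves undefined, when count == 0.  */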
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
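
/* The rotate is composed from the two shift primitives: rotating left
   by COUNT within PREC bits is the bitwise OR of a left shift by
   COUNT and a logical right shift by PREC - COUNT.  For example,
   rotating the 8-bit value 0x81 left by 1 gives
   (0x81 << 1 | 0x81 >> 7) & 0xff == 0x03.  rrotate_double below is
   the mirror image.  */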
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {		/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
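
/* Rounding summary (illustrative): dividing num = -7 by den = 2 gives
   quotient/remainder pairs -3/-1 for TRUNC_DIV_EXPR, -4/1 for
   FLOOR_DIV_EXPR, -3/-1 for CEIL_DIV_EXPR and -4/1 for ROUND_DIV_EXPR
   (ties round away from zero); in each case the final recomputation
   above restores the identity num == quo * den + rem.  */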
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
      }
      if (! TREE_OVERFLOW (tem)
          || TREE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
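
/* Examples of the transformations above (illustrative): -(a - b)
   becomes b - a when the reordering is allowed, -(-x) folds back to
   x via the NEGATE_EXPR case, the sign is pushed into one operand so
   that -(x * 5) becomes x * -5, and -sin (x) is rewritten as
   sin (-x) for the sign-preserving builtins accepted by
   negate_mathfn_p.  */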
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
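
/* Decomposition example (illustrative): splitting IN = a - 5 with
   CODE == PLUS_EXPR and NEGATE_P false stores the literal 5 in
   *MINUS_LITP (it was subtracted), leaves *LITP and *CONP null, and
   returns the variable part a.  associate_trees below reassembles
   such pieces.  */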
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);

  t = build_int_2 (low, hi);
  TREE_TYPE (t) = TREE_TYPE (arg1);

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
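
/* Usage note: this is the doubleword work-horse behind size_binop and
   const_binop.  For instance, folding 7 / 2 in a signed type takes
   the TRUNC_DIV_EXPR shortcut above (both operands fit in one word
   and are nonnegative), producing low == 3, hi == 0 with no overflow;
   force_fit_type then revalidates the result for the type unless
   NOTRUNC was requested.  */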
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
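
/* Design note: the table hash-conses the INTEGER_CST nodes built
   here, so repeated requests for the same (value, type) pair return
   one shared node.  NEW_CONST is reused as a scratch key for the
   lookup and is only installed in the table (with a fresh scratch
   node allocated) on a miss.  */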
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
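
/* Example (illustrative): for sizetype operands ARG0 == 4 and
   ARG1 == 12, the difference is computed as 12 - 4 == 8 in the
   unsigned type first, converted to ssizetype, and then subtracted
   from zero to give -8; doing the subtraction this way around avoids
   depending on unsigned wraparound in the constant folder.  */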
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
                    tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
          /* If x is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (x))
            {
              overflow = 1;
              low = 0;
              high = 0;
            }

          /* See if X will be in range after truncation towards 0.
             To compensate for truncation, move the bounds away from 0,
             but reject if X exactly equals the adjusted bounds.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
              if (! REAL_VALUES_LESS (l, x))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
                  if (! REAL_VALUES_LESS (x, u))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, x);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

static tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      abort ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      abort ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
2076 /* Return nonzero if two operands (typically of the same tree node)
2077 are necessarily equal. If either argument has side-effects this
2078 function returns zero.
2080 If ONLY_CONST is nonzero, only return nonzero for constants.
2081 This function tests whether the operands are indistinguishable;
2082 it does not test whether they are equal using C's == operation.
2083 The distinction is important for IEEE floating point, because
2084 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2085 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2087 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2088 even though it may hold multiple values during a function.
2089 This is because a GCC tree node guarantees that nothing else is
2090 executed between the evaluation of its "operands" (which may often
2091 be evaluated in arbitrary order). Hence if the operands themselves
2092 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2093 same value in each operand/subexpression. Hence a zero value for
2094 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2095 If comparing arbitrary expression trees, such as from different
2096 statements, ONLY_CONST must usually be nonzero. */
int
operand_equal_p (tree arg0, tree arg1, int only_const)
{
  tree fndecl;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
        return (! TREE_CONSTANT_OVERFLOW (arg0)
                && ! TREE_CONSTANT_OVERFLOW (arg1)
                && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                          TREE_REAL_CST (arg1)));

      case VECTOR_CST:
        {
          tree v1, v2;

          if (TREE_CONSTANT_OVERFLOW (arg0)
              || TREE_CONSTANT_OVERFLOW (arg1))
            return 0;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (v1, v2, only_const))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 only_const)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    only_const));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);

      default:
        break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
          if (TREE_UNSIGNED (TREE_TYPE (arg0))
              != TREE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
          && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
                              0))
        return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
               || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
               || TREE_CODE (arg0) == BIT_IOR_EXPR
               || TREE_CODE (arg0) == BIT_XOR_EXPR
               || TREE_CODE (arg0) == BIT_AND_EXPR
               || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case COMPONENT_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0));

        case BIT_FIELD_REF:
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 0), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 1), 0)
                  && operand_equal_p (TREE_OPERAND (arg0, 2),
                                      TREE_OPERAND (arg1, 2), 0));

        default:
          return 0;
        }

    case 'e':
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0);

        case RTL_EXPR:
          return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));

        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (TREE_OPERAND (arg0, 0),
                                 TREE_OPERAND (arg1, 0), 0))
            return 0;

          /* Only consider const functions equivalent.  */
          fndecl = get_callee_fndecl (arg0);
          if (fndecl == NULL_TREE
              || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
            return 0;

          /* Now see if all the arguments are the same.  operand_equal_p
             does not handle TREE_LIST, so we walk the operands here
             feeding them to operand_equal_p.  */
          arg0 = TREE_OPERAND (arg0, 1);
          arg1 = TREE_OPERAND (arg1, 1);
          while (arg0 && arg1)
            {
              if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
                return 0;

              arg0 = TREE_CHAIN (arg0);
              arg1 = TREE_CHAIN (arg1);
            }

          /* If we get here and both argument lists are exhausted
             then the CALL_EXPRs are equal.  */
          return ! (arg0 || arg1);

        default:
          return 0;
        }

    case 'd':
      /* Consider __builtin_sqrt equal to sqrt.  */
      return TREE_CODE (arg0) == FUNCTION_DECL
             && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
             && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
             && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);

    default:
      return 0;
    }
}
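/* As an illustration of the cases above: "a + b" and "b + a" compare
   equal through the commutative-operator test in the '2' case, and two
   occurrences of "p->x" compare equal through the 'r' case provided the
   dereferenced expressions have no side effects.  */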
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1)));
        default:
          break;
        }
      /* Fall through - ???  */

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build (code, type, arg0, arg1));
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
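/* As an illustration of the two routines above: when fold simplifies
   "f () * 0" to 0, the call f () still has side effects and must be kept,
   so omit_one_operand produces the equivalent of "(f (), 0)" via a
   COMPOUND_EXPR; with a side-effect-free operand it simply returns the
   converted RESULT.  */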
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
          && !flag_unsafe_math_optimizations
          && code != NE_EXPR
          && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        return build (invert_tree_comparison (code), type,
                      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build (TRUTH_XOR_EXPR, type,
                      invert_truthvalue (TREE_OPERAND (arg, 0)),
                      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)),
                    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build (EQ_EXPR, type, arg,
                    fold_convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
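/* The TRUTH_AND/TRUTH_ANDIF and TRUTH_OR/TRUTH_ORIF cases above are
   De Morgan's laws: for example, !(a && b) becomes (!a || !b), with each
   operand inverted recursively.  */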
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
                      fold (build (code, type, left, right))));
}
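/* Worked example: (X | 3) & (X | 5) matches with COMMON = X, LEFT = 3 and
   RIGHT = 5, and is rebuilt as X | (3 & 5), whose inner constant operation
   further folds to X | 1 (3 & 5 = 0b011 & 0b101 = 0b001).  */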
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
                    int unsignedp)
{
  tree result = build (BIT_FIELD_REF, type, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
                            tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (linner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask),
                  build (BIT_AND_EXPR, unsigned_type,
                         make_bit_field_ref (rinner, unsigned_type,
                                             nbitsize, nbitpos, 1),
                         mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitsize), 0)))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return fold_convert (compare_type,
                               (code == NE_EXPR
                                ? integer_one_node : integer_zero_node));
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
                              size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning ("comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return fold_convert (compare_type,
                               (code == NE_EXPR
                                ? integer_one_node : integer_zero_node));
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
                           const_binop (LSHIFT_EXPR,
                                        fold_convert (unsigned_type, rhs),
                                        size_int (lbitpos), 0),
                           mask, 0));

  return build (code, compare_type,
                build (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
}
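/* Worked example of the constant case, assuming a little-endian target:
   for a three-bit field F stored at bit 2 of a byte B, the test "F == 5"
   becomes

        (B & 0x1c) == (5 << 2)

   which avoids the shift that extracting F would otherwise require.  */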
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
    *punsignedp = TREE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
                        fold_convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size),
                                                  0),
                                     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
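/* For example, with an 8-bit signed type the sign bit is 0x80, so
   sign_bit_p returns EXP when VAL is 128 (masked to the type's width).
   If EXP is an extension from a 4-bit field, the recursive call also
   tests VAL against 0x08, the sign bit of the narrower type.  */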
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
          || TREE_CODE (exp) == CONVERT_EXPR)
         && (TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
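/* The unsigned subtraction trick works because values below the low bound
   wrap around to very large numbers.  E.g. for the range + [2, 5],
   (unsigned) (X - 2) <= 3 holds exactly when X is 2, 3, 4 or 5: for X == 1
   the subtraction wraps to 0xffffffff (assuming a 32-bit int), which is
   greater than 3.  */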
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return fold_convert (type, result ? integer_one_node : integer_zero_node);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (first_rtl_op (code) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == '<'
              || TREE_CODE_CLASS (code) == '1'
              || TREE_CODE_CLASS (code) == '2')
            type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == '2'
              || TREE_CODE_CLASS (code) == '<'
              || (TREE_CODE_CLASS (code) == 'e'
                  && TREE_CODE_LENGTH (code) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
         lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
        orig_type = type;

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              abort ();
            }

          exp = arg0;

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  */
          if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
                                  1, fold_convert (type, integer_zero_node),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = fold_convert (type, integer_zero_node);
                }
            }
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, type,
                               fold_convert (type, integer_zero_node),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, type,
                                fold_convert (type, integer_zero_node),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build (MINUS_EXPR, type, negate_expr (arg0),
                       fold_convert (type, integer_one_node));
          continue;

        case PLUS_EXPR:  case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
          if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
            break;

          if (! INTEGRAL_TYPE_P (type)
              || (low != 0 && ! int_fits_type_p (low, type))
              || (high != 0 && ! int_fits_type_p (high, type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert (type, n_low);

          if (n_high != 0)
            n_high = fold_convert (type, n_high);

          /* If we're converting from an unsigned to a signed type,
             we will be doing the comparison as unsigned.  The tests above
             have already verified that LOW and HIGH are both positive.

             So we have to make sure that the original unsigned value will
             be interpreted as positive.  */
          if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
            {
              tree equiv_type = (*lang_hooks.types.type_for_mode)
                (TYPE_MODE (type), 1);
              tree high_positive;

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (type);

              if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
                high_positive = fold (build (RSHIFT_EXPR, type,
                                             fold_convert (type,
                                                           high_positive),
                                             fold_convert (type,
                                                           integer_one_node)));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert (type, integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
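/* Worked example: for EXP = "X + 3 <= 20" with X signed, the comparison
   case first yields the range + [-, 20] for the subexpression X + 3; the
   PLUS_EXPR case then subtracts 3 from the bounds, so make_range returns
   X with the range + [-, 17], i.e. the test X <= 17.  */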
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
        {
          etype = (*lang_hooks.types.unsigned_type) (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* For enums the comparison will be done in the underlying type,
         so using enum's precision is wrong here.
         Consider e.g. enum { A, B, C, D, E }, low == B and high == D.  */
      if (TREE_CODE (etype) == ENUMERAL_TYPE)
        prec = GET_MODE_BITSIZE (TYPE_MODE (etype));
      else
        prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TREE_UNSIGNED (etype))
            {
              etype = (*lang_hooks.types.signed_type) (etype);
              exp = fold_convert (etype, exp);
            }
          return fold (build (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node)));
        }
    }

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
                              fold (build (MINUS_EXPR, etype, exp, low)),
                              1, fold_convert (etype, integer_zero_node),
                              value);

  return 0;
}
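/* Worked example: for IN_P = 1, LOW = 2, HIGH = 5 the general case at the
   bottom computes HIGH - LOW = 3 and recurses on EXP - 2 with the range
   [0, 3]; the zero-low case then switches to the corresponding unsigned
   type, ultimately producing (unsigned) (EXP - 2) <= 3.  */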
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            return 0;
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
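/* Worked example: merging + [2, 10] with + [6, 15] (both ranges included,
   as for an AND) falls into the first case: the ranges overlap and neither
   is a subset of the other, so the result is + [6, 10] -- from the start of
   the second range to the end of the first.  */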
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (tree exp)
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
               || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (RANGE_TEST_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
               || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
                          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                          TREE_TYPE (exp), lhs, rhs);
        }
    }

  return 0;
}
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
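/* Worked example: for C = 0x0b taken as a P = 4 bit signed value in an
   8-bit mode, the sign bit of the field is set, so TEMP becomes 0xf0 and
   the result is 0x0b ^ 0xf0 = 0xfb.  The extra bits of the result are thus
   zero only when C already arrives sign-extended (here, as 0xfb).  */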
3843 /* Find ways of folding logical expressions of LHS and RHS:
3844 Try to merge two comparisons to the same innermost item.
3845 Look for range tests like "ch >= '0' && ch <= '9'".
3846 Look for combinations of simple terms on machines with expensive branches
3847 and evaluate the RHS unconditionally.
3849 For example, if we have p->a == 2 && p->b == 4 and we can make an
3850 object large enough to span both A and B, we can do this with a comparison
3851 against the object ANDed with the a mask.
3853 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3854 operations to do this with one comparison.
3856 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3857 function and the one above.
3859 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3860 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3862 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3865 We return the simplified tree or 0 if no optimization is possible. */
3868 fold_truthop (enum tree_code code
, tree truth_type
, tree lhs
, tree rhs
)
3870 /* If this is the "or" of two comparisons, we can do something if
3871 the comparisons are NE_EXPR. If this is the "and", we can do something
3872 if the comparisons are EQ_EXPR. I.e.,
3873 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3875 WANTED_CODE is this operation code. For single bit fields, we can
3876 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3877 comparison for one-bit fields. */
3879 enum tree_code wanted_code
;
3880 enum tree_code lcode
, rcode
;
3881 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
3882 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
3883 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
3884 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
3885 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
3886 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
3887 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
3888 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
3889 enum machine_mode lnmode
, rnmode
;
3890 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
3891 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
3892 tree l_const
, r_const
;
3893 tree lntype
, rntype
, result
;
3894 int first_bit
, end_bit
;
3897 /* Start by getting the comparison codes. Fail if anything is volatile.
3898 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3899 it were surrounded with a NE_EXPR. */
3901 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
3904 lcode
= TREE_CODE (lhs
);
3905 rcode
= TREE_CODE (rhs
);
3907 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
3908 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
3910 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
3911 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
3913 if (TREE_CODE_CLASS (lcode
) != '<' || TREE_CODE_CLASS (rcode
) != '<')
3916 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
3917 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
3919 ll_arg
= TREE_OPERAND (lhs
, 0);
3920 lr_arg
= TREE_OPERAND (lhs
, 1);
3921 rl_arg
= TREE_OPERAND (rhs
, 0);
3922 rr_arg
= TREE_OPERAND (rhs
, 1);
3924 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3925 if (simple_operand_p (ll_arg
)
3926 && simple_operand_p (lr_arg
)
3927 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg
)))
3931 if (operand_equal_p (ll_arg
, rl_arg
, 0)
3932 && operand_equal_p (lr_arg
, rr_arg
, 0))
3934 int lcompcode
, rcompcode
;
3936 lcompcode
= comparison_to_compcode (lcode
);
3937 rcompcode
= comparison_to_compcode (rcode
);
3938 compcode
= (code
== TRUTH_AND_EXPR
)
3939 ? lcompcode
& rcompcode
3940 : lcompcode
| rcompcode
;
3942 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
3943 && operand_equal_p (lr_arg
, rl_arg
, 0))
3945 int lcompcode
, rcompcode
;
3947 rcode
= swap_tree_comparison (rcode
);
3948 lcompcode
= comparison_to_compcode (lcode
);
3949 rcompcode
= comparison_to_compcode (rcode
);
3950 compcode
= (code
== TRUTH_AND_EXPR
)
3951 ? lcompcode
& rcompcode
3952 : lcompcode
| rcompcode
;
3957 if (compcode
== COMPCODE_TRUE
)
3958 return fold_convert (truth_type
, integer_one_node
);
3959 else if (compcode
== COMPCODE_FALSE
)
3960 return fold_convert (truth_type
, integer_zero_node
);
3961 else if (compcode
!= -1)
3962 return build (compcode_to_comparison (compcode
),
3963 truth_type
, ll_arg
, lr_arg
);
3966 /* If the RHS can be evaluated unconditionally and its operands are
3967 simple, it wins to evaluate the RHS unconditionally on machines
3968 with expensive branches. In this case, this isn't a comparison
3969 that can be merged. Avoid doing this if the RHS is a floating-point
3970 comparison since those can trap. */
3972 if (BRANCH_COST
>= 2
3973 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
3974 && simple_operand_p (rl_arg
)
3975 && simple_operand_p (rr_arg
))
3977 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3978 if (code
== TRUTH_OR_EXPR
3979 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
3980 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
3981 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
3982 return build (NE_EXPR
, truth_type
,
3983 build (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
3987 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3988 if (code
== TRUTH_AND_EXPR
3989 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
3990 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
3991 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
))
3992 return build (EQ_EXPR
, truth_type
,
3993 build (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
3997 return build (code
, truth_type
, lhs
, rhs
);
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! (*lang_hooks.can_use_bit_fields_p) ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
                         size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
                         size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = fold_convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, ll_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return fold_convert (truth_type,
                               wanted_code == NE_EXPR
                               ? integer_one_node : integer_zero_node);
        }
    }
  if (r_const)
    {
      r_const = fold_convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold (build1 (BIT_NOT_EXPR,
                                                      lntype, rl_mask)),
                                        0)))
        {
          warning ("comparison is always %d", wanted_code == NE_EXPR);

          return fold_convert (truth_type,
                               wanted_code == NE_EXPR
                               ? integer_one_node : integer_zero_node);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
                             size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
                             size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build (wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert (rntype, lhs);
                  ll_mask = fold_convert (rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert (lntype, rhs);
                  lr_mask = fold_convert (lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);

          return build (wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
                           const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning ("`or' of unmatched not-equal tests is always 1");
          return fold_convert (truth_type, integer_one_node);
        }
      else
        {
          warning ("`and' of mutually exclusive equal-tests is always 0");
          return fold_convert (truth_type, integer_zero_node);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, lntype, result, ll_mask);

  return build (wanted_code, truth_type, result,
                const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (tree t)
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
        invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
        fold (build (TRUTH_ORIF_EXPR, type,
                     optimize_minmax_comparison
                     (build (EQ_EXPR, type, arg0, comp_const)),
                     optimize_minmax_comparison
                     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand (type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      abort ();
    }
}
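/* Illustration of the recursive dispatch above (not in the sources):
   `MIN (x, 4) >= 3' is rewritten by the GE_EXPR arm as
   `MIN (x, 4) == 3 || MIN (x, 4) > 3', and the EQ_EXPR and GT_EXPR arms
   then reduce the two halves to `x == 3' and `x > 3' respectively.  */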
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
   the machine has a multiply-accumulate insn or that this is part of an
   addressing calculation.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == '1')
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == '2')
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
           || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
           || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
           || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
          /* ... and is unsigned, and its type is smaller than ctype,
             then we cannot pass through as widening.  */
          && ((TREE_UNSIGNED (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (GET_MODE_SIZE (TYPE_MODE (ctype))
                   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (GET_MODE_SIZE (TYPE_MODE (type))
                  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TREE_UNSIGNED (ctype)
                      != TREE_UNSIGNED (TREE_TYPE (op0))))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && ! TREE_CONSTANT_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TREE_UNSIGNED (ctype) && !TREE_UNSIGNED (type))
        {
          tree cstype = (*lang_hooks.types.signed_type) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
            {
              t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

          return fold (build (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2)));
        }
      break;

    case WITH_RECORD_EXPR:
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
        return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
                      TREE_OPERAND (t, 1));
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1, 0)))
          && ! TREE_OVERFLOW (t1))
        return extract_muldiv (build (tcode == LSHIFT_EXPR
                                      ? MULT_EXPR : FLOOR_DIV_EXPR,
                                      ctype, fold_convert (ctype, op0), t1),
                               c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        return fold (build (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c), 0);
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && ! flag_wrapv))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold (build (tcode, ctype,
                            fold (build (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c))),
                            op1));

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
        return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
        return fold (build (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
        return fold (build (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code
          && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
                                     fold_convert (ctype, c), 0))
          && ! TREE_OVERFLOW (t1))
        return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation of CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ! flag_wrapv
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
            return fold (build (tcode, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           op1, c, 0))));
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
            return fold (build (code, ctype, fold_convert (ctype, op0),
                                fold_convert (ctype,
                                              const_binop (TRUNC_DIV_EXPR,
                                                           c, op1, 0))));
        }
      break;

    default:
      break;
    }

  return 0;
}
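/* Worked example matching the function comment (illustrative only): with
   t == (X * 8) + (Y * 16), c == 4 and code == TRUNC_DIV_EXPR, the
   PLUS_EXPR arm recurses into both multiplications, each MULT_EXPR arm
   divides its constant exactly through the "cancel" case, and the rebuilt
   result is (X * 2) + (Y * 4).  */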
/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
   S, a SAVE_EXPR, return the expression actually being evaluated.  Note
   that we may sometimes modify the tree.  */

static tree
strip_compound_expr (tree t, tree s)
{
  enum tree_code code = TREE_CODE (t);

  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
    return TREE_OPERAND (t, 1);

  /* See if this is a COND_EXPR or a simple arithmetic operator.  We
     don't bother handling any other types.  */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
  else if (TREE_CODE_CLASS (code) == '1')
    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
           || TREE_CODE_CLASS (code) == '2')
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}
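/* E.g. (illustrative, not from the sources): if T is
   `(void) SAVE_EXPR <x>, x * 2' and S is that SAVE_EXPR, the wrapping
   COMPOUND_EXPR is recognized by the first test above and `x * 2' is
   returned in its place.  */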
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

static tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
                                                integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Utility function for the following routine, to see how complex a nesting of
   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */

static int
count_cond (tree expr, int lim)
{
  int ctrue, cfalse;

  if (TREE_CODE (expr) != COND_EXPR)
    return 0;
  else if (lim <= 0)
    return 0;

  ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
  cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
  return MIN (lim, 1 + ctrue + cfalse);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
                                     tree cond, tree arg, int cond_first_p)
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  int save = 0;

  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  Instead of building `a + throw 3' for example,
         we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        {
          if (! cond_first_p)
            {
              lhs_code = COMPOUND_EXPR;
              lhs_type = void_type_node;
            }
          else
            lhs = true_value;
        }
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        {
          if (! cond_first_p)
            {
              rhs_code = COMPOUND_EXPR;
              rhs_type = void_type_node;
            }
          else
            rhs = false_value;
        }
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = fold_convert (testtype, integer_one_node);
      false_value = fold_convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate it once.  Though
     this is only required if it is volatile, it might be more efficient even
     if it is not.  However, if we succeed in folding one part to a constant,
     we do not need to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this SAVE_EXPR
     interferes with later optimizations, suppressing it when we can is
     important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
     do so.  Don't try to see if the result is a constant if an arm is a
     COND_EXPR since we get exponential behavior in that case.  */

  if (saved_expr_p (arg))
    save = 1;
  else if (lhs == 0 && rhs == 0
           && !TREE_CONSTANT (arg)
           && (*lang_hooks.decls.global_bindings_p) () == 0
           && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
               || TREE_SIDE_EFFECTS (arg)))
    {
      if (TREE_CODE (true_value) != COND_EXPR)
        lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
        rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
          && (rhs == 0 || !TREE_CONSTANT (rhs)))
        {
          arg = save_expr (arg);
          lhs = rhs = 0;
          save = saved_expr_p (arg);
        }
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
     ahead of the COND_EXPR we made.  Otherwise we would have it only
     evaluated in one branch, with the other branch using the result
     but missing the evaluation code.  Beware that the save_expr call
     above might not return a SAVE_EXPR, so testing the TREE_CODE
     of ARG is not enough to decide here.  */
  if (save)
    return build (COMPOUND_EXPR, type,
                  fold_convert (void_type_node, arg),
                  strip_compound_expr (test, arg));
  else
    return fold_convert (type, test);
}
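/* Illustration (not from the sources): with code == PLUS_EXPR,
   cond == (b ? x : y), arg == a and cond_first_p == 0, the result is
   `b ? (a + x) : (a + y)'; when `a' is complex and neither arm folds to
   a constant, `a' is first wrapped in a SAVE_EXPR so that it is
   evaluated only once.  */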
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
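/* Concrete case behind the rules above (illustrative): with signed
   zeros honored and round-to-nearest, `x + 0.0' must not fold to `x'
   since (-0.0) + 0.0 is +0.0, while `x - 0.0' may fold because only
   rounding towards -infinity could make 0.0 - 0.0 yield -0.0.  */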
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (fcode == BUILT_IN_SQRT
      || fcode == BUILT_IN_SQRTF
      || fcode == BUILT_IN_SQRTL)
    {
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand (type,
                                     fold_convert (type, integer_zero_node),
                                     arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand (type,
                                     fold_convert (type, integer_one_node),
                                     arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold (build (GE_EXPR, type, arg,
                              build_real (TREE_TYPE (arg), dconst0)));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold (build (EQ_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2)));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand (type,
                                       fold_convert (type, integer_zero_node),
                                       arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold (build (code, type, arg,
                              build_real (TREE_TYPE (arg), c2)));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand (type,
                                         fold_convert (type, integer_one_node),
                                         arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold (build (NE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), c2)));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold (build (GE_EXPR, type, arg,
                                    build_real (TREE_TYPE (arg), dconst0)));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if ((*lang_hooks.decls.global_bindings_p) () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold (build (TRUTH_ANDIF_EXPR, type,
                                  fold (build (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0))),
                                  fold (build (NE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)))));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold (build (code, type, arg,
                                build_real (TREE_TYPE (arg), c2)));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if ((*lang_hooks.decls.global_bindings_p) () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold (build (TRUTH_ANDIF_EXPR, type,
                                  fold (build (GE_EXPR, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           dconst0))),
                                  fold (build (code, type, arg,
                                               build_real (TREE_TYPE (arg),
                                                           c2)))));
            }
        }
    }

  return NULL_TREE;
}
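/* Illustration (not in the sources): if NaNs need not be honored,
   `sqrt (x) < 2.0' folds through the LT_EXPR arm above to `x < 4.0';
   when NaNs are honored it instead becomes `x >= 0.0 && x < 4.0' with
   x wrapped in a SAVE_EXPR.  */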
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand (type,
                               fold_convert (type, integer_zero_node),
                               arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand (type,
                                 fold_convert (type, integer_one_node),
                                 arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if ((*lang_hooks.decls.global_bindings_p) () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold (build (EQ_EXPR, type, arg0, arg0));
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max)));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max)));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold (build (neg ? GE_EXPR : LE_EXPR, type,
                            arg0, build_real (TREE_TYPE (arg0), max)));
      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
                          arg0, build_real (TREE_TYPE (arg0), max)));
      return fold (build1 (TRUTH_NOT_EXPR, type, temp));

    default:
      break;
    }

  return NULL_TREE;
}
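/* Illustration: for a double `x', `x < +Inf' folds via the LT_EXPR arm
   above to `x <= DBL_MAX', while `x > -Inf' is first flipped by
   swap_tree_comparison and then folds to `x >= -DBL_MAX'.  */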
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
                      tree result_type)
{
  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
     operand 0.  */
  if (code == TRUTH_NOT_EXPR)
    {
      code = TREE_CODE (arg0);
      if (code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      /* Extract the arguments of the EQ/NE.  */
      arg1 = TREE_OPERAND (arg0, 1);
      arg0 = TREE_OPERAND (arg0, 0);

      /* This requires us to invert the code.  */
      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
    }

  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree arg00;

      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
          return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
                              fold_convert (stype, arg00),
                              fold_convert (stype, integer_zero_node)));
        }

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
      ops_unsigned = 1;
#endif

      signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
      unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      if (bitnum != 0)
        inner = build (RSHIFT_EXPR, intermediate_type,
                       inner, size_int (bitnum));

      if (code == EQ_EXPR)
        inner = build (BIT_XOR_EXPR, intermediate_type,
                       inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build (BIT_AND_EXPR, intermediate_type,
                     inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
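/* Illustration (not in the sources): `(x & 8) != 0' in an int context
   becomes roughly `(int) (((unsigned) x >> 3) & 1)'; the EQ_EXPR form
   gains an extra `^ 1' before the final AND, and `(x & SIGN_BIT) != 0'
   is instead folded to `(signed) x < 0'.  */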
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (tree arg0, tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

static bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_size)
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
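/* Illustration: given `5 + x', the INTEGER_CST rule above reports that
   the operands should be swapped, so fold's commutativity check below
   canonicalizes the tree to `x + 5'.  */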
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the C type of the overall expression)
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static tree
#else
tree
#endif
fold (tree expr)
{
  tree t = expr, orig_t;
  tree t1 = NULL_TREE;
  tree tem;
  tree type = TREE_TYPE (expr);
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  int kind = TREE_CODE_CLASS (code);
  int invert;
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;

  /* Don't try to process an RTL_EXPR since its operands aren't trees.
     Likewise for a SAVE_EXPR that's already been evaluated.  */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
    return t;

  /* Return right away if a constant.  */
  if (kind == 'c')
    return t;

  orig_t = t;

  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
        STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
        subop = TREE_REALPART (arg0);
      else
        subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
          && TREE_CODE (subop) != REAL_CST)
        /* Note that TREE_CONSTANT isn't enough:
           static var addresses are constant but we can't
           do arithmetic on them.  */
        wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind))
    {
      int len = first_rtl_op (code);
      int i;
      for (i = 0; i < len; i++)
        {
          tree op = TREE_OPERAND (t, i);
          tree subop;

          if (op == 0)
            continue;           /* Valid for CALL_EXPR, at least.  */

          if (kind == '<' || code == RSHIFT_EXPR)
            {
              /* Signedness matters here.  Perhaps we can refine this
                 later.  */
              STRIP_SIGN_NOPS (op);
            }
          else
            /* Strip any conversions that don't change the mode.  */
            STRIP_NOPS (op);

          if (TREE_CODE (op) == COMPLEX_CST)
            subop = TREE_REALPART (op);
          else
            subop = op;

          if (TREE_CODE (subop) != INTEGER_CST
              && TREE_CODE (subop) != REAL_CST)
            /* Note that TREE_CONSTANT isn't enough:
               static var addresses are constant but we can't
               do arithmetic on them.  */
            wins = 0;

          if (i == 0)
            arg0 = op;
          else if (i == 1)
            arg1 = op;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
       || code == BIT_AND_EXPR)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold (build (code, type, TREE_OPERAND (t, 1),
                        TREE_OPERAND (t, 0)));

  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                       : TRUTH_XOR_EXPR,
                       type, arg0, arg1));

      if (code == EQ_EXPR)
        t = invert_truthvalue (t);

      return t;
    }

  if (TREE_CODE_CLASS (code) == '1')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold (build1 (code, type, arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold (build1 (code, type, arg02));
          t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
                           arg01, arg02));

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((code == NOP_EXPR || code == CONVERT_EXPR
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (t) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (t, 1)) == code
              && TREE_CODE (TREE_OPERAND (t, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
              && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
                    && (INTEGRAL_TYPE_P
                        (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
                    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
            t = build1 (code, type,
                        build (COND_EXPR,
                               TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (t, 1), 0)),
                               TREE_OPERAND (t, 0),
                               TREE_OPERAND (TREE_OPERAND (t, 1), 0),
                               TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
          return t;
        }
      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
        return fold (build (COND_EXPR, type, arg0,
                            fold (build1 (code, type, integer_one_node)),
                            fold (build1 (code, type, integer_zero_node))));
    }
  else if (TREE_CODE_CLASS (code) == '<'
           && TREE_CODE (arg0) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
  else if (TREE_CODE_CLASS (code) == '<'
           && TREE_CODE (arg1) == COMPOUND_EXPR)
    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
  else if (TREE_CODE_CLASS (code) == '2'
           || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
          && ! TREE_SIDE_EFFECTS (arg0))
        return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                      fold (build (code, type,
                                   arg0, TREE_OPERAND (arg1, 1))));
      else if ((TREE_CODE (arg1) == COND_EXPR
                || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
                    && TREE_CODE_CLASS (code) != '<'))
               && (TREE_CODE (arg0) != COND_EXPR
                   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
               && (! TREE_SIDE_EFFECTS (arg0)
                   || ((*lang_hooks.decls.global_bindings_p) () == 0
                       && ! CONTAINS_PLACEHOLDER_P (arg0))))
        return
          fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
                                               /*cond_first_p=*/0);
      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
      else if ((TREE_CODE (arg0) == COND_EXPR
                || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
                    && TREE_CODE_CLASS (code) != '<'))
               && (TREE_CODE (arg1) != COND_EXPR
                   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
               && (! TREE_SIDE_EFFECTS (arg1)
                   || ((*lang_hooks.decls.global_bindings_p) () == 0
                       && ! CONTAINS_PLACEHOLDER_P (arg1))))
        return
          fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
                                               /*cond_first_p=*/1);
    }

  switch (code)
    {
    case INTEGER_CST:
    case REAL_CST:
    case VECTOR_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
      return t;

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Other kinds of FIX are not handled properly by fold_convert.  */

      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
        return TREE_OPERAND (t, 0);

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
          tree final_type = TREE_TYPE (t);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TREE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TREE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (final_type);
          int final_ptr = POINTER_TYPE_P (final_type);
          int final_float = FLOAT_TYPE_P (final_type);
          unsigned int final_prec = TYPE_PRECISION (final_type);
          int final_unsignedp = TREE_UNSIGNED (final_type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
              && ((inter_int && final_int) || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold (build1 (code, final_type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* Likewise, if the intermediate and final types are either both
             float or both integer, we don't need the middle conversion if
             it is wider than the final type and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer
             since then we sometimes need the inner conversion.  Likewise if
             the outer has a precision not equal to the size of its mode.  */
          if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
               || (inter_float && inside_float))
              && inter_prec >= inside_prec
              && (inter_float || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
                    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold (build1 (code, final_type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold (build1 (code, final_type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));

          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && (inter_prec > inside_prec || inter_prec > final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
                    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
              && ! final_ptr)
            return fold (build1 (code, final_type,
                                 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
        }
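      /* Illustration (not in the sources): for `short s', the pair of
         conversions `(int) (long) s' passes the final test above (no
         floats, the intermediate type is wider than both ends, and
         signedness is preserved), so it folds to the single conversion
         `(int) s'.  */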
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
               && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tree prev = TREE_OPERAND (t, 0);
          if (t == orig_t)
            t = copy_node (t);
          TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
          /* First do the assignment, then return converted constant.  */
          t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
          TREE_NO_UNUSED_WARNING (t) = 1;
          TREE_USED (t) = 1;
          return t;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
        {
          tree and = TREE_OPERAND (t, 0);
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
          int change = 0;

          if (TREE_UNSIGNED (TREE_TYPE (and))
              || (TYPE_PRECISION (TREE_TYPE (t))
                  <= TYPE_PRECISION (TREE_TYPE (and))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
                  and0 = fold_convert (uns, and0);
                  and1 = fold_convert (uns, and1);
                }
#endif
            }
          if (change)
            return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
                                fold_convert (TREE_TYPE (t), and0),
                                fold_convert (TREE_TYPE (t), and1)));
        }
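      /* Illustration (not in the sources): `(long) (x & 0xff)' with
         `int x' satisfies the test above (the masked constant has no
         sign bit set), so the extension is pushed inside, giving
         `(long) x & 0xff'.  */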
5757 tem
= fold_convert_const (code
, TREE_TYPE (t
), arg0
);
5758 return tem
? tem
: t
;
5760 case VIEW_CONVERT_EXPR
:
5761 if (TREE_CODE (TREE_OPERAND (t
, 0)) == VIEW_CONVERT_EXPR
)
5762 return build1 (VIEW_CONVERT_EXPR
, type
,
5763 TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
5767 if (TREE_CODE (arg0
) == CONSTRUCTOR
5768 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
5770 tree m
= purpose_member (arg1
, CONSTRUCTOR_ELTS (arg0
));
5777 if (TREE_CONSTANT (t
) != wins
)
5781 TREE_CONSTANT (t
) = wins
;
5786 if (negate_expr_p (arg0
))
5787 return fold_convert (type
, negate_expr (arg0
));
5793 if (TREE_CODE (arg0
) == INTEGER_CST
)
5795 /* If the value is unsigned, then the absolute value is
5796 the same as the ordinary value. */
5797 if (TREE_UNSIGNED (type
))
5799 /* Similarly, if the value is non-negative. */
5800 else if (INT_CST_LT (integer_minus_one_node
, arg0
))
5802 /* If the value is negative, then the absolute value is
5806 unsigned HOST_WIDE_INT low
;
5808 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
5809 TREE_INT_CST_HIGH (arg0
),
5811 t
= build_int_2 (low
, high
);
5812 TREE_TYPE (t
) = type
;
5814 = (TREE_OVERFLOW (arg0
)
5815 | force_fit_type (t
, overflow
));
5816 TREE_CONSTANT_OVERFLOW (t
)
5817 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
5820 else if (TREE_CODE (arg0
) == REAL_CST
)
5822 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
5823 t
= build_real (type
,
5824 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
5827 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
5828 return fold (build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0)));
5829 /* Convert fabs((double)float) into (double)fabsf(float). */
5830 else if (TREE_CODE (arg0
) == NOP_EXPR
5831 && TREE_CODE (type
) == REAL_TYPE
)
5833 tree targ0
= strip_float_extensions (arg0
);
5835 return fold_convert (type
, fold (build1 (ABS_EXPR
,
5839 else if (tree_expr_nonnegative_p (arg0
))
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build (COMPLEX_EXPR, type,
		      TREE_OPERAND (arg0, 0),
		      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (CONJ_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (CONJ_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case BIT_NOT_EXPR:
      if (wins)
	{
	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
			   ~ TREE_INT_CST_HIGH (arg0));
	  TREE_TYPE (t) = type;
	  force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      else if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
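
	  /* Illustrative example (not part of the original source): with
	     "int x, y", the sum "(x & 0x0f) + (y & 0xf0)" can never carry
	     between the two masked halves, so it is handled as
	     "(x & 0x0f) | (y & 0xf0)" by the BIT_IOR_EXPR code.  */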
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || (TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;

	      if (TREE_CODE (arg0) == PLUS_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type,
						 fold_convert (type, parg0),
						 fold_convert (type, marg))),
				    fold_convert (type, parg1)));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type,
						 fold_convert (type, parg1),
						 fold_convert (type, marg))),
				    fold_convert (type, parg0)));
	    }
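
	  /* Illustrative example for the factoring below (not part of the
	     original source): for "int i, j", the sum "i*12 + j*4" has no
	     identical multiplicand, but 4 is a power of two dividing 12,
	     so it is refactored as "(i*3 + j) * 4".  */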
	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build (MULT_EXPR, type, arg00,
					  build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type,
						 alt0, alt1)),
				    same));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue (fold_convert (type, arg1));

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold (build (MULT_EXPR, type, arg0,
				build_real (type, dconst2)));

	  /* Convert x*c+x into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build (MULT_EXPR, type, arg1,
				  build_real (type, c)));
	    }

	  /* Convert x+x*c into x*(c+1).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      REAL_VALUE_TYPE c;

	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
	      return fold (build (MULT_EXPR, type, arg0,
				  build_real (type, c)));
	    }

	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == MULT_EXPR
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      REAL_VALUE_TYPE c1, c2;

	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
	      return fold (build (MULT_EXPR, type,
				  TREE_OPERAND (arg0, 0),
				  build_real (type, c1)));
	    }
	}
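
      /* Illustrative example (not part of the original source): under
	 -funsafe-math-optimizations, "x*3.0 + x*5.0" for "double x" is
	 folded by the transformations above into "x * 8.0"; "x + x"
	 becomes "x * 2.0" even without that flag.  */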
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
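
      /* Illustrative example (not part of the original source): for a
	 32-bit "unsigned int x", "(x << 3) + (x >> 29)" satisfies
	 3 + 29 == 32 and is recognized as "x" rotated left by 3; the
	 variable form "(x << n) + (x >> (32 - n))" is matched by the
	 MINUS_EXPR branches above.  */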
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -funsafe-math-optimizations.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return fold_convert (type,
					 associate_trees (var0, minus_lit0,
							  MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return fold_convert (type,
					   associate_trees (var0, con0,
							    PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return fold_convert (type, associate_trees (var0, con0,
							  code, type));
	    }
	}
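
      /* Illustrative example (not part of the original source): for
	 "int a, b", "(a + 5) + (b + 7)" splits into variables {a, b} and
	 literals {5, 7}, which reassociate to "(a + b) + 12" so the two
	 constants are combined.  */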
    binary:
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = fold_convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold (build (MINUS_EXPR, type, negate_expr (arg1),
			    TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (fold_convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));
	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 0))),
				    arg0));
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold (build (BIT_AND_EXPR, type,
				    fold (build1 (BIT_NOT_EXPR, type,
						  TREE_OPERAND (arg1, 1))),
				    arg0));
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold (build (BIT_XOR_EXPR, type,
				     TREE_OPERAND (arg0, 0), mask1));
		  return fold (build (MINUS_EXPR, type, tem, mask1));
		}
	    }
	}
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert (type, integer_zero_node);

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (MULT_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (MULT_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));
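
	  /* Illustrative example (not part of the original source): for
	     "int a, b", "a * (1 << b)" is strength-reduced above to
	     "a << b", and the commuted form "(1 << b) * a" becomes the
	     same shift.  */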
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
					     fold_convert (type, arg1),
					     code, NULL_TREE)))
	    return fold_convert (type, tem);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (fold_convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold (build1 (NEGATE_EXPR, type, arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0);
	      if (tem)
		return fold (build (RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1)));
	    }
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of sqrt(...)*sqrt(...).  */
	      if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
		  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
		  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
		{
		  tree sqrtfn, arg, arglist;
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
		  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  arg = fold (build (MULT_EXPR, type, arg00, arg10));
		  arglist = build_tree_list (NULL_TREE, arg);
		  return build_function_call_expr (sqrtfn, arglist);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1
		  && (fcode0 == BUILT_IN_EXP
		      || fcode0 == BUILT_IN_EXPF
		      || fcode0 == BUILT_IN_EXPL
		      || fcode0 == BUILT_IN_EXP2
		      || fcode0 == BUILT_IN_EXP2F
		      || fcode0 == BUILT_IN_EXP2L
		      || fcode0 == BUILT_IN_EXP10
		      || fcode0 == BUILT_IN_EXP10F
		      || fcode0 == BUILT_IN_EXP10L
		      || fcode0 == BUILT_IN_POW10
		      || fcode0 == BUILT_IN_POW10F
		      || fcode0 == BUILT_IN_POW10L))
		{
		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  tree arg = build (PLUS_EXPR, type,
				    TREE_VALUE (TREE_OPERAND (arg0, 1)),
				    TREE_VALUE (TREE_OPERAND (arg1, 1)));
		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
		  return build_function_call_expr (expfn, arglist);
		}
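
	      /* Illustrative example (not part of the original source):
		 under -funsafe-math-optimizations, "exp (x) * exp (y)"
		 becomes "exp (x + y)", trading a multiplication and one
		 call for an addition; the same holds for exp2 and
		 exp10/pow10.  */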
	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = build (MULT_EXPR, type, arg00, arg10);
		      tree arglist = tree_cons (NULL_TREE, fold (arg),
						build_tree_list (NULL_TREE,
								 arg01));
		      return build_function_call_expr (powfn, arglist);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
		      tree arglist = tree_cons (NULL_TREE, arg00,
						build_tree_list (NULL_TREE,
								 arg));
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
		{
		  tree sinfn;

		  switch (fcode0)
		    {
		    case BUILT_IN_TAN:
		    case BUILT_IN_COS:
		      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
		      break;
		    case BUILT_IN_TANF:
		    case BUILT_IN_COSF:
		      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
		      break;
		    case BUILT_IN_TANL:
		    case BUILT_IN_COSL:
		      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
		      break;
		    default:
		      sinfn = NULL_TREE;
		    }

		  if (sinfn != NULL_TREE)
		    return build_function_call_expr (sinfn,
						     TREE_OPERAND (arg0, 1));
		}
	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
								     1)));
		  if (TREE_CODE (arg11) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
								     1)));
		  if (TREE_CODE (arg01) == REAL_CST
		      && ! TREE_CONSTANT_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg, arglist;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg1, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (! optimize_size
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = NULL_TREE;

		  if (type == double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POW];
		  else if (type == float_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWF];
		  else if (type == long_double_type_node)
		    powfn = implicit_built_in_decls[BUILT_IN_POWL];

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      tree arglist = build_tree_list (NULL_TREE, arg);
		      arglist = tree_cons (NULL_TREE, arg0, arglist);
		      return build_function_call_expr (powfn, arglist);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
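
      /* Illustrative example (not part of the original source): for
	 "int a, b", "~a | ~b" is rewritten by De Morgan's law as
	 "~(a & b)", which a machine with a NAND instruction can emit
	 as a single operation.  */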
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return fold_convert (type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;
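
      /* Illustrative example (not part of the original source): for
	 "unsigned char c", "(int) c & 0377" masks with all bits of the
	 already zero-extended 8-bit operand set, so the fold above
	 reduces it to "(int) c" with no AND at all.  */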
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold (build (RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold (build (RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
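
      /* Illustrative example (not part of the original source): "x / 4.0"
	 has the exactly representable reciprocal 0.25, so when optimizing
	 it becomes "x * 0.25" even without -funsafe-math-optimizations;
	 "x / 3.0" is only rewritten under the unsafe flag because 1/3 is
	 inexact in binary floating point.  */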
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold (build (MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold (build (MULT_EXPR, type,
			    fold (build (RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0))),
			    TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1), 0);
	  if (tem)
	    return fold (build (RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0)));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg1);
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (fcode == BUILT_IN_EXP
	      || fcode == BUILT_IN_EXPF
	      || fcode == BUILT_IN_EXPL
	      || fcode == BUILT_IN_EXP2
	      || fcode == BUILT_IN_EXP2F
	      || fcode == BUILT_IN_EXP2L
	      || fcode == BUILT_IN_EXP10
	      || fcode == BUILT_IN_EXP10F
	      || fcode == BUILT_IN_EXP10L
	      || fcode == BUILT_IN_POW10
	      || fcode == BUILT_IN_POW10F
	      || fcode == BUILT_IN_POW10L)
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg = build1 (NEGATE_EXPR, type,
				 TREE_VALUE (TREE_OPERAND (arg1, 1)));
	      tree arglist = build_tree_list (NULL_TREE, fold (arg));
	      arg1 = build_function_call_expr (expfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode == BUILT_IN_POW
	      || fcode == BUILT_IN_POWF
	      || fcode == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
	      tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
	      tree arglist = tree_cons (NULL_TREE, arg10,
					build_tree_list (NULL_TREE, neg11));
	      arg1 = build_function_call_expr (powfn, arglist);
	      return fold (build (MULT_EXPR, type, arg0, arg1));
	    }
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_SIN)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_SINF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_SINL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		return build_function_call_expr (tanfn,
						 TREE_OPERAND (arg0, 1));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
	    {
	      tree tanfn;

	      if (fcode0 == BUILT_IN_COS)
		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
	      else if (fcode0 == BUILT_IN_COSF)
		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
	      else if (fcode0 == BUILT_IN_COSL)
		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
	      else
		tanfn = NULL_TREE;

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = TREE_OPERAND (arg0, 1);
		  tmp = build_function_call_expr (tanfn, tmp);
		  return fold (build (RDIV_EXPR, type,
				      build_real (type, dconst1),
				      tmp));
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      if (TREE_CODE (arg01) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg, arglist;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  arglist = build_tree_list (NULL_TREE, arg);
		  arglist = tree_cons (NULL_TREE, arg1, arglist);
		  return build_function_call_expr (powfn, arglist);
		}
	    }
	}
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return fold_convert (type, tem);

      goto binary;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
	return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
	  tem = fold_convert (TREE_TYPE (arg1), tem);
	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
	  return fold (build (RROTATE_EXPR, type, arg0, tem));
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;
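
      /* Illustrative example (not part of the original source): on a
	 32-bit type, a left rotation by 5 is rewritten above as a right
	 rotation by 32 - 5 == 27, so the back end only ever sees
	 RROTATE_EXPR for constant counts.  */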
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	{
	  tem = fold_single_bit_test (code, arg0, arg1, type);
	  if (tem)
	    return tem;
	  return t;
	}
      return fold_convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.   Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a01,
				fold (build (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build (TREE_CODE (arg0), type,
				fold (build (code, type, a00, a10)),
				a01));
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (fold_convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (fold_convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      return t;

    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
	return fold (build (swap_tree_comparison (code), type, arg1, arg0));

      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree targ0 = strip_float_extensions (arg0);
	  tree targ1 = strip_float_extensions (arg1);
	  tree newtype = TREE_TYPE (targ0);

	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	    newtype = TREE_TYPE (targ1);

	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	    return fold (build (code, type, fold_convert (newtype, targ0),
				fold_convert (newtype, targ1)));

	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0)));

	  if (TREE_CODE (arg1) == REAL_CST)
	    {
	      REAL_VALUE_TYPE cst;
	      cst = TREE_REAL_CST (arg1);

	      /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	      if (TREE_CODE (arg0) == NEGATE_EXPR)
		return
		  fold (build (swap_tree_comparison (code), type,
			       TREE_OPERAND (arg0, 0),
			       build_real (TREE_TYPE (arg1),
					   REAL_VALUE_NEGATE (cst))));

	      /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	      /* a CMP (-0) -> a CMP 0  */
	      if (REAL_VALUE_MINUS_ZERO (cst))
		return fold (build (code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0)));

	      /* x != NaN is always true, other ops are always false.  */
	      if (REAL_VALUE_ISNAN (cst)
		  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
		{
		  t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
		  return omit_one_operand (type, fold_convert (type, t), arg0);
		}

	      /* Fold comparisons against infinity.  */
	      if (REAL_VALUE_ISINF (cst))
		{
		  tem = fold_inf_compare (code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

	  /* Likewise, we can simplify a comparison of a real constant with
	     a MINUS_EXPR whose first operand is also a real constant, i.e.
	     (c1 - x) < c2 becomes x > c1-c2.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (arg0) == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
					  arg1, 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem));

	  /* Fold comparisons against built-in math functions.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && flag_unsafe_math_optimizations
	      && ! flag_errno_math)
	    {
	      enum built_in_function fcode = builtin_mathfn_code (arg0);

	      if (fcode != END_BUILTINS)
		{
		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
		  if (tem != NULL_TREE)
		    return tem;
		}
	    }
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
      if (TREE_CONSTANT (arg1)
	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
	  /* This optimization is invalid for ordered comparisons
	     if CONST+INCR overflows or if foo+incr might overflow.
	     This optimization is invalid for floating point due to rounding.
	     For pointer types we assume overflow doesn't happen.  */
	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && (code == EQ_EXPR || code == NE_EXPR))))
	{
	  tree varop, newconst;

	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	    {
	      newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1)));
	      varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (arg0, 0),
			     TREE_OPERAND (arg0, 1));
	    }
	  else
	    {
	      newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
				      arg1, TREE_OPERAND (arg0, 1)));
	      varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			     TREE_OPERAND (arg0, 0),
			     TREE_OPERAND (arg0, 1));
	    }

	  /* If VAROP is a reference to a bitfield, we must mask
	     the constant by the width of the field.  */
	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
	    {
	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	      int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
	      tree folded_compare, shift;

	      /* First check whether the comparison would come out
		 always the same.  If we don't do that we would
		 change the meaning with the masking.  */
	      folded_compare = fold (build (code, type,
					    TREE_OPERAND (varop, 0),
					    arg1));
	      if (integer_zerop (folded_compare)
		  || integer_onep (folded_compare))
		return omit_one_operand (type, folded_compare, varop);

	      shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
				   0);
	      newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift));
	      newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
				      newconst, shift));
	    }

	  return fold (build (code, type, varop, newconst));
	}
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build (GT_EXPR, type, arg0, arg1));

	    case LT_EXPR:
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      return fold (build (LE_EXPR, type, arg0, arg1));

	    default:
	      break;
	    }
	}
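
      /* Illustrative example (not part of the original source): for
	 "int x", "x >= 3" becomes "x > 2" and "x < 3" becomes "x <= 2",
	 canonicalizing toward the forms the constant-boundary checks
	 below expect.  */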
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    unsigned HOST_WIDE_INT signed_max;
	    unsigned HOST_WIDE_INT max, min;

	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
	      {
		max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		min = 0;
	      }
	    else
	      {
		max = signed_max;
		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	      }

	    if (TREE_INT_CST_HIGH (arg1) == 0
		&& TREE_INT_CST_LOW (arg1) == max)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_zero_node),
					   arg0);
		case GE_EXPR:
		  return fold (build (EQ_EXPR, type, arg0, arg1));

		case LE_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_one_node),
					   arg0);
		case LT_EXPR:
		  return fold (build (NE_EXPR, type, arg0, arg1));

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == max - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (EQ_EXPR, type, arg0, arg1));
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (NE_EXPR, type, arg0, arg1));
		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_zero_node),
					   arg0);
		case LE_EXPR:
		  return fold (build (EQ_EXPR, type, arg0, arg1));

		case GE_EXPR:
		  return omit_one_operand (type,
					   fold_convert (type,
							 integer_one_node),
					   arg0);
		case GT_EXPR:
		  return fold (build (NE_EXPR, type, arg0, arg1));

		default:
		  break;
		}
	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (NE_EXPR, type, arg0, arg1));
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  return fold (build (EQ_EXPR, type, arg0, arg1));
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == signed_max
		     && TREE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
		    return fold
		      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			      type, fold_convert (st0, arg0),
			      fold_convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR
	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
	       && (TYPE_PRECISION (TREE_TYPE (tem))
		   > TYPE_PRECISION (TREE_TYPE (arg0)))
	       && (code == EQ_EXPR || code == NE_EXPR
		   || TREE_UNSIGNED (TREE_TYPE (arg0))
		      == TREE_UNSIGNED (TREE_TYPE (tem)))
	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
		   || (TREE_CODE (t1) == INTEGER_CST
		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
	return fold (build (code, type, tem,
			    fold_convert (TREE_TYPE (tem), t1)));

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return optimize_minmax_comparison (t);

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (TRUTH_ANDIF_EXPR, type,
			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
			    build (LE_EXPR, type,
				   TREE_OPERAND (arg0, 0), arg1)));
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
				  fold_convert (TREE_TYPE (arg0),
						integer_one_node)),
			   arg1));
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 0),
					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
				  fold_convert (TREE_TYPE (arg0),
						integer_one_node)),
			   arg1));
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
	  tree newmod = build (TREE_CODE (arg0), newtype,
			       fold_convert (newtype,
					     TREE_OPERAND (arg0, 0)),
			       fold_convert (newtype,
					     TREE_OPERAND (arg0, 1)));

	  return build (code, type, newmod, fold_convert (newtype, arg1));
	}
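      /* Editorial example: for "int x;", the test (x % 4) == 0 becomes
	 ((unsigned) x % 4U) == 0; the unsigned remainder is a simple
	 mask of the low bits, whereas the signed remainder must also
	 preserve the sign of x.  */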
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, integer_zero_node));
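      /* Editorial example: (a & 8) == 8 becomes (a & 8) != 0, which is
	 then a single-bit test that fold_single_bit_test just below can
	 turn into shifts and logical operations.  */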
      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
	 2, then fold the expression into shifts and logical operations.  */
      tem = fold_single_bit_test (code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree dandnotc
	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
			   arg1, build1 (BIT_NOT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 1))));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree candnotd
	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
			   TREE_OPERAND (arg0, 1),
			   build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand (type, rslt, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TREE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			     TREE_OPERAND (arg1, 1)),
		      fold_convert (TREE_TYPE (arg0), integer_zero_node));

      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TREE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		 fold_convert (TREE_TYPE (arg0),
			       build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				      TREE_OPERAND (TREE_OPERAND (arg1, 0),
						    1))),
		 fold_convert (TREE_TYPE (arg0), integer_zero_node));
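      /* Editorial example: with "unsigned u; int n;", u < (1U << n)
	 becomes (u >> n) == 0 and u >= (1U << n) becomes (u >> n) != 0;
	 both hold because u < 2**n exactly when all bits of u at
	 position n and above are zero.  */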
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold (build (EQ_EXPR, type, arg0, arg1));

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      abort ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like  ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, minval),
			       arg1));
	      tree equal_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, maxval),
			       arg1));
	      tree low_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, minval, cval2, maxval),
			       arg1));

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  t = build (code, type, cval1, cval2);
		  if (save_p)
		    return save_expr (t);
		  else
		    return fold (t);
		}
	    }
	}
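      /* Editorial example: for the macroized idiom
	 ((x > y) - (y > x)) > 0, substituting the three orderings of x
	 and y gives (1 - 0) > 0, (0 - 0) > 0, and (0 - 1) > 0, i.e. the
	 mask 4, which is true only when x > y, so the whole expression
	 folds back to x > y.  */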
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && (*lang_hooks.can_use_bit_fields_p) ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* If this is a comparison of complex values and either or both sides
	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
	 This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
	  && (TREE_CODE (arg0) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg0) == COMPLEX_CST
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree real0, imag0, real1, imag1;

	  arg0 = save_expr (arg0);
	  arg1 = save_expr (arg1);
	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
			       : TRUTH_ORIF_EXPR),
			      type,
			      fold (build (code, type, real0, real1)),
			      fold (build (code, type, imag0, imag1))));
	}
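      /* Editorial example: given complex operands a + bi and c + di,
	 equality becomes (a == c) && (b == d) via TRUTH_ANDIF_EXPR, and
	 inequality becomes (a != c) || (b != d) via TRUTH_ORIF_EXPR, so
	 the second comparison is skipped when the first one decides.  */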
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN (fndecl)
	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    return fold (build (code, type,
				build1 (INDIRECT_REF, char_type_node,
					TREE_VALUE (arglist)),
				integer_zero_node));
	}
      /* From here on, the only cases we handle are when the result is
	 known to be a constant.

	 To compute GT, swap the arguments and do LT.
	 To compute GE, do LT and invert the result.
	 To compute LE, swap the arguments, do LT and invert the result.
	 To compute NE, do EQ and invert the result.

	 Therefore, the code below must handle only EQ and LT.  */

      if (code == LE_EXPR || code == GT_EXPR)
	{
	  tem = arg0, arg0 = arg1, arg1 = tem;
	  code = swap_tree_comparison (code);
	}

      /* Note that it is safe to invert for real values here because we
	 will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
	{
	  invert = 1;
	  code = invert_tree_comparison (code);
	}

      /* Compute a result for LT or EQ if args permit;
	 otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	{
	  if (code == EQ_EXPR)
	    t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
	  else
	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
			       : INT_CST_LT (arg0, arg1)),
			      0);
	}

#if 0 /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
	 since such code would be undefined anyway.
	 Exception: on sysvr4, using #pragma weak,
	 a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && !integer_zerop (arg1)
	       && TREE_CONSTANT (arg0)
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && code == EQ_EXPR)
	t1 = build_int_2 (0, 0);
#endif

      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  /* If either operand is a NaN, the result is false with two
	     exceptions: First, an NE_EXPR is true on NaNs, but that case
	     is already handled correctly since we will be inverting the
	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
	     or a GE_EXPR into a LT_EXPR, we must return true so that it
	     will be inverted into false.  */

	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    t1 = build_int_2 (invert && code == LT_EXPR, 0);

	  else if (code == EQ_EXPR)
	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
						 TREE_REAL_CST (arg1)),
			      0);
	  else
	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
						TREE_REAL_CST (arg1)),
			      0);
	}

      if (t1 == NULL_TREE)
	return t;

      if (invert)
	TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return (*lang_hooks.truthvalue_conversion) (t1);
      return t1;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.  */
	  if (! VOID_TYPE_P (TREE_TYPE (tem))
	      || VOID_TYPE_P (TREE_TYPE (t)))
	    return pedantic_non_lvalue (tem);
	  return t;
	}
      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tree arg2 = TREE_OPERAND (t, 2);
	  enum tree_code comp_code = TREE_CODE (arg0);

	  /* If we have A op 0 ? A : -A, consider applying the following
	     transformations:

	     A == 0? A : -A    same as -A
	     A != 0? A : -A    same as A
	     A >= 0? A : -A    same as abs (A)
	     A > 0?  A : -A    same as abs (A)
	     A <= 0? A : -A    same as -abs (A)
	     A < 0?  A : -A    same as -abs (A)

	     None of these transformations work for modes with signed
	     zeros.  If A is +/-0, the first two transformations will
	     change the sign of the result (from +0 to -0, or vice
	     versa).  The last four will fix the sign of the result,
	     even though the original expressions could be positive or
	     negative, depending on the sign of A.

	     Note that all these transformations are correct if A is
	     NaN, since the two alternatives (A and -A) are also NaNs.  */
	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
	       ? real_zerop (TREE_OPERAND (arg0, 1))
	       : integer_zerop (TREE_OPERAND (arg0, 1)))
	      && TREE_CODE (arg2) == NEGATE_EXPR
	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
		tem = fold_convert (type, negate_expr (tem));
		return pedantic_non_lvalue (tem);
	      case NE_EXPR:
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      case GE_EXPR:
	      case GT_EXPR:
		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = fold_convert ((*lang_hooks.types.signed_type)
				       (TREE_TYPE (arg1)), arg1);
		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      case LE_EXPR:
	      case LT_EXPR:
		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = fold_convert ((lang_hooks.types.signed_type)
				       (TREE_TYPE (arg1)), arg1);
		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
		arg1 = negate_expr (fold_convert (type, arg1));
		return pedantic_non_lvalue (arg1);
	      default:
		abort ();
	      }

	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
	     both transformations are correct when A is NaN: A != 0
	     is then true, and A == 0 is false.  */

	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
	    {
	      if (comp_code == NE_EXPR)
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      else if (comp_code == EQ_EXPR)
		return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
	    }
	  /* Try some transformations of A op B ? A : B.

	     A == B? A : B    same as B
	     A != B? A : B    same as A
	     A >= B? A : B    same as max (A, B)
	     A > B?  A : B    same as max (B, A)
	     A <= B? A : B    same as min (A, B)
	     A < B?  A : B    same as min (B, A)

	     As above, these transformations don't work in the presence
	     of signed zeros.  For example, if A and B are zeros of
	     opposite sign, the first two transformations will change
	     the sign of the result.  In the last four, the original
	     expressions give different results for (A=+0, B=-0) and
	     (A=-0, B=+0), but the transformed expressions do not.

	     The first two transformations are correct if either A or B
	     is a NaN.  In the first transformation, the condition will
	     be false, and B will indeed be chosen.  In the case of the
	     second transformation, the condition A != B will be true,
	     and A will be chosen.

	     The conversions to max() and min() are not correct if B is
	     a number and A is not.  The conditions in the original
	     expressions will be false, so all four give B.  The min()
	     and max() versions would give a NaN instead.  */
	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
					      arg2, TREE_OPERAND (arg0, 0)))
	    {
	      tree comp_op0 = TREE_OPERAND (arg0, 0);
	      tree comp_op1 = TREE_OPERAND (arg0, 1);
	      tree comp_type = TREE_TYPE (comp_op0);

	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
		{
		  comp_type = type;
		  comp_op0 = arg1;
		  comp_op1 = arg2;
		}

	      switch (comp_code)
		{
		case EQ_EXPR:
		  return pedantic_non_lvalue (fold_convert (type, arg2));
		case NE_EXPR:
		  return pedantic_non_lvalue (fold_convert (type, arg1));
		case LE_EXPR:
		case LT_EXPR:
		  /* In C++ a ?: expression can be an lvalue, so put the
		     operand which will be used if they are equal first
		     so that we can convert this back to the
		     corresponding COND_EXPR.  */
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue (fold_convert
		      (type, fold (build (MIN_EXPR, comp_type,
					  (comp_code == LE_EXPR
					   ? comp_op0 : comp_op1),
					  (comp_code == LE_EXPR
					   ? comp_op1 : comp_op0)))));
		  break;
		case GE_EXPR:
		case GT_EXPR:
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue (fold_convert
		      (type, fold (build (MAX_EXPR, comp_type,
					  (comp_code == GE_EXPR
					   ? comp_op0 : comp_op1),
					  (comp_code == GE_EXPR
					   ? comp_op1 : comp_op0)))));
		  break;
		default:
		  abort ();
		}
	    }
	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
	     we might still be able to simplify this.  For example,
	     if C1 is one less or one more than C2, this might have started
	     out as a MIN or MAX and been transformed by this function.
	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

	  if (INTEGRAL_TYPE_P (type)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST
	      /* ??? We somehow can end up here with
		  (unsigned int)1 == 1 ? 1U : 2U
		 for which we won't make any progress but recurse
		 indefinitely.  Just stop here in this case.  */
	      && TREE_CODE (arg1) != INTEGER_CST)
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		/* We can replace A with C1 in this case.  */
		arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
		return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
				    TREE_OPERAND (t, 2)));

	      case LT_EXPR:
		/* If C1 is C2 + 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MIN_EXPR, type, arg1, arg2)));
		break;

	      case LE_EXPR:
		/* If C1 is C2 - 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MIN_EXPR, type, arg1, arg2)));
		break;

	      case GT_EXPR:
		/* If C1 is C2 - 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MAX_EXPR, type, arg1, arg2)));
		break;

	      case GE_EXPR:
		/* If C1 is C2 + 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MAX_EXPR, type, arg1, arg2)));
		break;

	      case NE_EXPR:
		break;

	      default:
		abort ();
	      }
	}
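      /* Editorial example: "x < 3 ? x : 2" matches the LT_EXPR case
	 with C1 == C2 + 1 and is rewritten as MIN_EXPR (x, 2);
	 similarly "x >= 3 ? x : 2" becomes MAX_EXPR (x, 2).  */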
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (TREE_OPERAND (t, 1),
				   TREE_OPERAND (t, 2), false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold (build (code, type, tem,
				TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
	  && integer_zerop (TREE_OPERAND (t, 2))
	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
	  && integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
	 operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, 1))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
						 arg0, arg1)));
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
						     tem, arg1)));
	}

      return t;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
	return t;
      /* Don't let (0, 0) be null pointer constant.  */
      if (integer_zerop (arg1))
	return build1 (NOP_EXPR, type, arg1);
      return fold_convert (type, arg1);

    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, arg0, arg1);
      return t;
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (REALPART_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (REALPART_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (IMAGPART_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (IMAGPART_EXPR, type,
					  TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
	 appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
	return TREE_OPERAND (t, 0);

      {
	enum tree_code code0 = TREE_CODE (arg0);
	int kind0 = TREE_CODE_CLASS (code0);
	tree arg00 = TREE_OPERAND (arg0, 0);
	tree arg01;

	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
	  return fold (build1 (code0, type,
			       fold (build1 (CLEANUP_POINT_EXPR,
					     TREE_TYPE (arg00), arg00))));

	if (kind0 == '<' || kind0 == '2'
	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
	    || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
	    || code0 == TRUTH_XOR_EXPR)
	  {
	    arg01 = TREE_OPERAND (arg0, 1);

	    if (TREE_CONSTANT (arg00)
		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
		    && ! has_cleanups (arg00)))
	      return fold (build (code0, type, arg00,
				  fold (build1 (CLEANUP_POINT_EXPR,
						TREE_TYPE (arg01), arg01))));

	    if (TREE_CONSTANT (arg01))
	      return fold (build (code0, type,
				  fold (build1 (CLEANUP_POINT_EXPR,
						TREE_TYPE (arg00), arg00)),
				  arg01));
	  }

	return t;
      }

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	{
	  tree tmp = fold_builtin (expr);
	  if (tmp)
	    return tmp;
	}
      return t;

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_decl buf;
  int i, len;

  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (&buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (&buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (&buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'x':
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'e':
      switch (code)
	{
	case SAVE_EXPR: len = 2; break;
	case GOTO_SUBROUTINE_EXPR: len = 0; break;
	case RTL_EXPR: len = 0; break;
	case WITH_CLEANUP_EXPR: len = 2; break;
	default: break;
	}
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
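/* Editorial example: multiple_of_p returns nonzero for
   TOP = (J * 8) + 16 and BOTTOM = 8, since both PLUS_EXPR operands are
   multiples of 8 (the MULT_EXPR because one of its operands is), but it
   returns zero for TOP = J * 8 and BOTTOM = 16, since neither 8 nor J
   is known to be a multiple of 16.  */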
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;
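    /* Editorial example: with "unsigned char a, b;", the sum
       (int) a + (int) b needs at most MAX (8, 8) + 1 == 9 bits, and
       9 < 32, so the PLUS_EXPR is known non-negative; widening from
       32-bit unsigned to a 32-bit result fails the test, since the sum
       could wrap into the sign bit.  */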
    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TREE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TREE_UNSIGNED (inner_type);
	  }
      }
      break;

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_CABS:
	    case BUILT_IN_CABSL:
	    case BUILT_IN_CABSF:
	    case BUILT_IN_EXP:
	    case BUILT_IN_EXPF:
	    case BUILT_IN_EXPL:
	    case BUILT_IN_EXP2:
	    case BUILT_IN_EXP2F:
	    case BUILT_IN_EXP2L:
	    case BUILT_IN_EXP10:
	    case BUILT_IN_EXP10F:
	    case BUILT_IN_EXP10L:
	    case BUILT_IN_FABS:
	    case BUILT_IN_FABSF:
	    case BUILT_IN_FABSL:
	    case BUILT_IN_FFS:
	    case BUILT_IN_FFSL:
	    case BUILT_IN_FFSLL:
	    case BUILT_IN_PARITY:
	    case BUILT_IN_PARITYL:
	    case BUILT_IN_PARITYLL:
	    case BUILT_IN_POPCOUNT:
	    case BUILT_IN_POPCOUNTL:
	    case BUILT_IN_POPCOUNTLL:
	    case BUILT_IN_POW10:
	    case BUILT_IN_POW10F:
	    case BUILT_IN_POW10L:
	    case BUILT_IN_SQRT:
	    case BUILT_IN_SQRTF:
	    case BUILT_IN_SQRTL:
	      return 1;

	    case BUILT_IN_ATAN:
	    case BUILT_IN_ATANF:
	    case BUILT_IN_ATANL:
	    case BUILT_IN_CEIL:
	    case BUILT_IN_CEILF:
	    case BUILT_IN_CEILL:
	    case BUILT_IN_FLOOR:
	    case BUILT_IN_FLOORF:
	    case BUILT_IN_FLOORL:
	    case BUILT_IN_NEARBYINT:
	    case BUILT_IN_NEARBYINTF:
	    case BUILT_IN_NEARBYINTL:
	    case BUILT_IN_ROUND:
	    case BUILT_IN_ROUNDF:
	    case BUILT_IN_ROUNDL:
	    case BUILT_IN_TRUNC:
	    case BUILT_IN_TRUNCF:
	    case BUILT_IN_TRUNCL:
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    case BUILT_IN_FMOD:
	    case BUILT_IN_FMODF:
	    case BUILT_IN_FMODL:
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
8831 return tree_expr_nonnegative_p (TREE_OPERAND (t
, 0));
8833 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t
));
8837 tree fndecl
= get_callee_fndecl (t
);
8838 tree arglist
= TREE_OPERAND (t
, 1);
8840 && DECL_BUILT_IN (fndecl
)
8841 && DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_MD
)
8842 switch (DECL_FUNCTION_CODE (fndecl
))
8845 case BUILT_IN_CABSL
:
8846 case BUILT_IN_CABSF
:
8851 case BUILT_IN_EXP2F
:
8852 case BUILT_IN_EXP2L
:
8853 case BUILT_IN_EXP10
:
8854 case BUILT_IN_EXP10F
:
8855 case BUILT_IN_EXP10L
:
8857 case BUILT_IN_FABSF
:
8858 case BUILT_IN_FABSL
:
8861 case BUILT_IN_FFSLL
:
8862 case BUILT_IN_PARITY
:
8863 case BUILT_IN_PARITYL
:
8864 case BUILT_IN_PARITYLL
:
8865 case BUILT_IN_POPCOUNT
:
8866 case BUILT_IN_POPCOUNTL
:
8867 case BUILT_IN_POPCOUNTLL
:
8868 case BUILT_IN_POW10
:
8869 case BUILT_IN_POW10F
:
8870 case BUILT_IN_POW10L
:
8872 case BUILT_IN_SQRTF
:
8873 case BUILT_IN_SQRTL
:
8877 case BUILT_IN_ATANF
:
8878 case BUILT_IN_ATANL
:
8880 case BUILT_IN_CEILF
:
8881 case BUILT_IN_CEILL
:
8882 case BUILT_IN_FLOOR
:
8883 case BUILT_IN_FLOORF
:
8884 case BUILT_IN_FLOORL
:
8885 case BUILT_IN_NEARBYINT
:
8886 case BUILT_IN_NEARBYINTF
:
8887 case BUILT_IN_NEARBYINTL
:
8888 case BUILT_IN_ROUND
:
8889 case BUILT_IN_ROUNDF
:
8890 case BUILT_IN_ROUNDL
:
8891 case BUILT_IN_TRUNC
:
8892 case BUILT_IN_TRUNCF
:
8893 case BUILT_IN_TRUNCL
:
8894 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
8899 return tree_expr_nonnegative_p (TREE_VALUE (arglist
));
8906 /* ... fall through ... */
8909 if (truth_value_p (TREE_CODE (t
)))
8910 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8914 /* We don't know sign of `t', so be conservative and return false. */
8918 /* Return true if `r' is known to be non-negative.
8919 Only handles constants at the moment. */
8922 rtl_expr_nonnegative_p (rtx r
)
8924 switch (GET_CODE (r
))
8927 return INTVAL (r
) >= 0;
8930 if (GET_MODE (r
) == VOIDmode
)
8931 return CONST_DOUBLE_HIGH (r
) >= 0;
8939 units
= CONST_VECTOR_NUNITS (r
);
8941 for (i
= 0; i
< units
; ++i
)
8943 elt
= CONST_VECTOR_ELT (r
, i
);
8944 if (!rtl_expr_nonnegative_p (elt
))
8953 /* These are always nonnegative. */
8961 #include "gt-fold-const.h"