/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "langhooks.h"
static void encode		PARAMS ((HOST_WIDE_INT *,
					 unsigned HOST_WIDE_INT,
					 HOST_WIDE_INT));
static void decode		PARAMS ((HOST_WIDE_INT *,
					 unsigned HOST_WIDE_INT *,
					 HOST_WIDE_INT *));
static tree negate_expr		PARAMS ((tree));
static tree split_tree		PARAMS ((tree, enum tree_code, tree *, tree *,
					 tree *, int));
static tree associate_trees	PARAMS ((tree, tree, enum tree_code, tree));
static tree int_const_binop	PARAMS ((enum tree_code, tree, tree, int));
static tree const_binop		PARAMS ((enum tree_code, tree, tree, int));
static hashval_t size_htab_hash	PARAMS ((const void *));
static int size_htab_eq	PARAMS ((const void *, const void *));
static tree fold_convert	PARAMS ((tree, tree));
static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
static int comparison_to_compcode PARAMS ((enum tree_code));
static enum tree_code compcode_to_comparison PARAMS ((int));
static int truth_value_p	PARAMS ((enum tree_code));
static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
static int twoval_comparison_p	PARAMS ((tree, tree *, tree *, int *));
static tree eval_subst		PARAMS ((tree, tree, tree, tree, tree));
static tree omit_one_operand	PARAMS ((tree, tree, tree));
static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref	PARAMS ((tree, tree, int, int, int));
static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
						tree, tree));
static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
					    HOST_WIDE_INT *,
					    enum machine_mode *, int *,
					    int *, tree *, tree *));
static int all_ones_mask_p	PARAMS ((tree, int));
static tree sign_bit_p		PARAMS ((tree, tree));
static int simple_operand_p	PARAMS ((tree));
static tree range_binop		PARAMS ((enum tree_code, tree, tree, int,
					 tree, int));
static tree make_range		PARAMS ((tree, int *, tree *, tree *));
static tree build_range_check	PARAMS ((tree, tree, int, tree, tree));
static int merge_ranges		PARAMS ((int *, tree *, tree *, int, tree, tree,
					 int, tree, tree));
static tree fold_range_test	PARAMS ((tree));
static tree unextend		PARAMS ((tree, int, int, tree));
static tree fold_truthop	PARAMS ((enum tree_code, tree, tree, tree));
static tree optimize_minmax_comparison PARAMS ((tree));
static tree extract_muldiv	PARAMS ((tree, tree, enum tree_code, tree));
static tree strip_compound_expr PARAMS ((tree, tree));
static int multiple_of_p	PARAMS ((tree, tree, tree));
static tree constant_boolean_node PARAMS ((int, tree));
static int count_cond		PARAMS ((tree, int));
static tree fold_binary_op_with_conditional_arg
  PARAMS ((enum tree_code, tree, tree, tree, int));
static bool fold_real_zero_addition_p	PARAMS ((tree, tree, int));
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_TRUE    7
#define COMPCODE_GE      6
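
/* For illustration: LT, EQ and GT occupy separate bits, so combining
   two comparisons of the same operands is just bitwise arithmetic on
   the codes.  COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, and
   COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, which is how a test such
   as `a <= b && a >= b' can fold to `a == b'.  */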
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
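
/* Worked example, using 8-bit values for brevity: 127 + 1 gives
   SUM = -128.  ~(127 ^ 1) has the sign bit set (the addends agree in
   sign) and 127 ^ -128 has the sign bit set (A and SUM disagree), so
   the macro yields nonzero.  For 127 + (-1) = 126 the first factor's
   sign bit is clear and no overflow is reported.  */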
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
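
/* As an illustration, on a host where HOST_WIDE_INT is 32 bits wide,
   BASE is 0x10000, and for x == 0x12345678 we get LOWPART (x) == 0x5678
   and HIGHPART (x) == 0x1234, so LOWPART (x) + HIGHPART (x) * BASE
   reconstructs x.  */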
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT low;
     HOST_WIDE_INT hi;
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (words, low, hi)
     HOST_WIDE_INT *words;
     unsigned HOST_WIDE_INT *low;
     HOST_WIDE_INT *hi;
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
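
/* A round trip through encode/decode is the identity.  E.g., assuming
   a 32-bit HOST_WIDE_INT, encode (words, 0xDEADBEEF, 0x12345678)
   yields words = { 0xBEEF, 0xDEAD, 0x5678, 0x1234 }, and decode on
   that array recovers *low == 0xDEADBEEF and *hi == 0x12345678.  */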
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.

   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
   if it exists.  */

int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
			 overflow);
#endif
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
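
/* For instance, forcing 258 into an 8-bit unsigned type masks the
   value down to 2 (unsigned non-sizetypes report no overflow), while
   forcing 255 into an 8-bit signed type clears the excess bits and
   then sign-extends, leaving -1; there the low word changes, so
   signed overflow is reported.  */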
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
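
/* Example: adding the doubleword value with H1 = 0, L1 = ~0 to
   L2 = 1, H2 = 0 carries into the high word: l wraps to 0, so
   (l < l1) contributes the carry and the result is h = 1, l = 0,
   with no signed overflow reported.  */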
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (l1, h1, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (l1, h1, l2, h2, lv, hv)
     unsigned HOST_WIDE_INT l1, l2;
     HOST_WIDE_INT h1, h2;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset ((char *) prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
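
/* The overflow test works because for a correct signed product the
   upper half (prod[4] through prod[7]) must be a pure sign extension
   of the lower half: all zeros when *hv is nonnegative, all ones when
   *hv is negative.  The neg_double/add_double fixups above convert
   the unsigned upper half to the signed one when an operand was
   negative.  */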
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
     int arith;
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     unsigned HOST_WIDE_INT l1;
     HOST_WIDE_INT h1, count;
     unsigned int prec;
     unsigned HOST_WIDE_INT *lv;
     HOST_WIDE_INT *hv;
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     unsigned HOST_WIDE_INT lnum_orig;	/* num == numerator == dividend */
     HOST_WIDE_INT hnum_orig;
     unsigned HOST_WIDE_INT lden_orig;	/* den == denominator == divisor */
     HOST_WIDE_INT hden_orig;
     unsigned HOST_WIDE_INT *lquo, *lrem;
     HOST_WIDE_INT *hquo, *hrem;
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset ((char *) quo, 0, sizeof quo);

  memset ((char *) num, 0, sizeof num);	/* to zero 9th element */
  memset ((char *) den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
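
/* Worked example for the rounding cases, dividing -7 by 3: the trial
   quotient is -2 with remainder -1 (TRUNC_DIV_EXPR).  FLOOR_DIV_EXPR
   sees a negative ratio with a nonzero remainder and adjusts to -3,
   after which the true remainder is recomputed as 2.  CEIL_DIV_EXPR
   leaves a negative ratio alone but would adjust 7 / 3 from 2 up to 3.
   ROUND_DIV_EXPR adjusts only when twice the remainder's magnitude
   exceeds the divisor's, so -7 / 3 stays at -2.  */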
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (t)
     tree t;
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
      if (! TREE_UNSIGNED (type)
	  && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
	  && ! TREE_OVERFLOW (tem))
	return tem;
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	return convert (type,
			fold (build (MINUS_EXPR, TREE_TYPE (t),
				     TREE_OPERAND (t, 1),
				     TREE_OPERAND (t, 0))));
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (in, code, conp, litp, minus_litp, negate_p)
     tree in;
     enum tree_code code;
     tree *conp, *litp, *minus_litp;
     int negate_p;
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
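
/* For example, splitting IN = `x + 3' with CODE == PLUS_EXPR stores 3
   in *LITP and returns `x'; splitting `x - 3' instead stores 3 in
   *MINUS_LITP.  An operand that is TREE_CONSTANT without being a
   literal, such as the ADDR_EXPR of a static object, would land in
   *CONP.  */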
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (t1, t2, code, type)
     tree t1, t2;
     enum tree_code code;
     tree type;
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, convert (type, t2),
			  convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, convert (type, t1),
			  convert (type, TREE_OPERAND (t2, 0)));
	}

      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (TREE_TYPE (arg1),
		      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
					   value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (x)
     const void *x;
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
	  ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
	  ^ (TREE_OVERFLOW (t) << 20));
}

/* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

static int
size_htab_eq (x, y)
     const void *x;
     const void *y;
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
	  && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
	  && TREE_TYPE (xt) == TREE_TYPE (yt)
	  && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  PTR *slot;

  if (size_htab == 0)
    {
      size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = (PTR) new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
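
/* The hash table makes repeated requests cheap: the first call for a
   given (value, type) pair interns the scratch node NEW_CONST and
   allocates a fresh scratch node, while later calls return the
   already-interned INTEGER_CST, so equal size constants are shared.  */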
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (code, arg0, arg1)
     enum tree_code code;
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
		       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.  */

static tree
fold_convert (t, arg1)
     tree t;
     tree arg1;
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (x)
     tree x;
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

tree
pedantic_non_lvalue (x)
     tree x;
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (code)
     enum tree_code code;

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (code)
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (code)
     enum tree_code code;
{
  return (TREE_CODE_CLASS (code) == '<'
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (v1, v2, only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      if (TREE_CODE (arg0) == RTL_EXPR)
	return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
			  (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are non-zero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2121 comparisons. Evaluate the operations in the tree substituting NEW0 for
2122 any occurrence of OLD0 as an operand of a comparison and likewise for
2126 eval_subst (arg
, old0
, new0
, old1
, new1
)
2128 tree old0
, new0
, old1
, new1
;
2130 tree type
= TREE_TYPE (arg
);
2131 enum tree_code code
= TREE_CODE (arg
);
2132 char class = TREE_CODE_CLASS (code
);
2134 /* We can handle some of the 'e' cases here. */
2135 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
2137 else if (class == 'e'
2138 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2144 return fold (build1 (code
, type
,
2145 eval_subst (TREE_OPERAND (arg
, 0),
2146 old0
, new0
, old1
, new1
)));
2149 return fold (build (code
, type
,
2150 eval_subst (TREE_OPERAND (arg
, 0),
2151 old0
, new0
, old1
, new1
),
2152 eval_subst (TREE_OPERAND (arg
, 1),
2153 old0
, new0
, old1
, new1
)));
2159 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
2162 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
2165 return fold (build (code
, type
,
2166 eval_subst (TREE_OPERAND (arg
, 0),
2167 old0
, new0
, old1
, new1
),
2168 eval_subst (TREE_OPERAND (arg
, 1),
2169 old0
, new0
, old1
, new1
),
2170 eval_subst (TREE_OPERAND (arg
, 2),
2171 old0
, new0
, old1
, new1
)));
2175 /* fall through - ??? */
2179 tree arg0
= TREE_OPERAND (arg
, 0);
2180 tree arg1
= TREE_OPERAND (arg
, 1);
2182 /* We need to check both for exact equality and tree equality. The
2183 former will be true if the operand has a side-effect. In that
2184 case, we know the operand occurred exactly once. */
2186 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2188 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2191 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2193 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2196 return fold (build (code
, type
, arg0
, arg1
));
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

static tree
omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR
	  && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (code, type, arg0, arg1)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
		      fold (build (code, type, left, right))));
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is non-zero.  */

static tree
make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
     tree inner;
     tree type;
     int bitsize, bitpos;
     int unsignedp;
{
  tree result = build (BIT_FIELD_REF, type, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     HOST_WIDE_INT *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return non-zero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (mask, size)
     tree mask;
     int size;
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_2 (~0, ~0);
  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
  force_fit_type (tmask, 0);
  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (exp, val)
     tree exp;
     tree val;
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
    }

  if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (exp)
     tree exp;
{
  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (exp) == NOP_EXPR
	  || TREE_CODE (exp) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (exp))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
    exp = TREE_OPERAND (exp, 0);

  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
     enum tree_code code;
     tree type, arg0, arg1;
     int upper0_p, upper1_p;
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return convert (type, result ? integer_one_node : integer_zero_node);
}
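
/* Illustrative example: an omitted ARG1 with UPPER1_P set stands for the
   type's highest value, so range_binop (LT_EXPR, type, c, 0, 0, 1)
   yields 1 for any constant bound C -- every representable bound is
   below an omitted upper bound.  */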
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we
		 have a low bound, reverse the range so
		 it goes from zero to the low bound minus 1.  */
	      if (high == 0 && low)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
		high_positive = fold (build (RSHIFT_EXPR, type,
					     convert (type, high_positive),
					     convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = TREE_OPERAND (exp, 0);
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
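
/* Illustrative trace: for EXP = "x > 10" we enter the GT_EXPR case with
   IN_P == 0 and get low = 0, high = 10, i.e. the range "- [-, 10]":
   x is outside [lowest, 10], which is exactly x > 10.  The returned
   expression is the stripped operand "x".  */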
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */

static tree
build_range_check (type, exp, in_p, low, high)
     tree type;
     tree exp;
     int in_p;
     tree low, high;
{
  tree etype = TREE_TYPE (exp);
  tree value;

  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);
	    }
	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));
	}
    }

  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);

  return 0;
}
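
/* Illustrative example: for IN_P set, LOW == 2 and HIGH == 5, none of
   the early cases match, so VALUE becomes 3 and we recurse on "EXP - 2"
   against [0, 3]; the zero-low case then compares unsigned, giving the
   classic "(unsigned) (EXP - 2) <= 3" form of the range test.  */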
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
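
/* Illustrative example: merging "+ [2, 7]" with "+ [5, 12]" (both IN_P
   set) finds no_overlap and subset both false, so the result is
   "+ [5, 7]" -- the intersection, from the start of the second range to
   the end of the first.  */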
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (exp)
     tree exp;
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
	   && lhs != 0 && rhs != 0
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if ((*lang_hooks.decls.global_bindings_p) () == 0
	       && ! contains_placeholder_p (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
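
/* Illustrative example: for "ch >= '0' && ch <= '9'" the two ranges
   + ['0', -] and + [-, '9'] merge into + ['0', '9'], and
   build_range_check turns that into roughly
   "(unsigned char) (ch - '0') <= 9".  */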
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (c, p, unsignedp, mask)
     tree c;
     int p;
     int unsignedp;
     tree mask;
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TREE_UNSIGNED (type))
    temp = convert ((*lang_hooks.types.signed_type) (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TREE_UNSIGNED (type))
    temp = convert (type, temp);

  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
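
/* Worked example (assume an 8-bit mode, P == 4, MASK == 0): for the
   sign-extended constant C == 0xfd (field value -3), TEMP becomes 0xf0
   and C ^ TEMP == 0x0d, whose extra bits are zero; for the unextended
   C == 0x0d the same TEMP gives 0xfd, so the extra bits are set.  */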
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (code, truth_type, lhs, rhs)
     enum tree_code code;
     tree truth_type, lhs, rhs;
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  int first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);

  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
    return 0;

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg)
      && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
    {
      int compcode;

      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  int lcompcode, rcompcode;

	  lcompcode = comparison_to_compcode (lcode);
	  rcompcode = comparison_to_compcode (rcode);
	  compcode = (code == TRUTH_AND_EXPR)
		     ? lcompcode & rcompcode
		     : lcompcode | rcompcode;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  int lcompcode, rcompcode;

	  rcode = swap_tree_comparison (rcode);
	  lcompcode = comparison_to_compcode (lcode);
	  rcompcode = comparison_to_compcode (rcode);
	  compcode = (code == TRUTH_AND_EXPR)
		     ? lcompcode & rcompcode
		     : lcompcode | rcompcode;
	}
      else
	compcode = -1;

      if (compcode == COMPCODE_TRUE)
	return convert (truth_type, integer_one_node);
      else if (compcode == COMPCODE_FALSE)
	return convert (truth_type, integer_zero_node);
      else if (compcode != -1)
	return build (compcode_to_comparison (compcode),
		      truth_type, ll_arg, lr_arg);
    }

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build (NE_EXPR, truth_type,
		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			     ll_arg, rl_arg),
		      integer_zero_node);

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
	return build (EQ_EXPR, truth_type,
		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			     ll_arg, rl_arg),
		      integer_zero_node);

      return build (code, truth_type, lhs, rhs);
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* After this point all optimizations will generate bit-field
     references, which we might not want.  */
  if (! (*lang_hooks.can_use_bit_fields_p) ())
    return 0;

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
			 size_int (xll_bitpos), 0);
  rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
			 size_int (xrl_bitpos), 0);

  if (l_const)
    {
      l_const = convert (lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, ll_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }
  if (r_const)
    {
      r_const = convert (lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold (build1 (BIT_NOT_EXPR,
						      lntype, rl_mask)),
					0)))
	{
	  warning ("comparison is always %d", wanted_code == NE_EXPR);

	  return convert (truth_type,
			  wanted_code == NE_EXPR
			  ? integer_one_node : integer_zero_node);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
			     size_int (xlr_bitpos), 0);
      rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
			     size_int (xrr_bitpos), 0);

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = convert (rntype, lhs);
		  ll_mask = convert (rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = convert (lntype, rhs);
		  lr_mask = convert (lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build (wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always 0");
	  return convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, lntype, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
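
/* Illustrative sketch (field layout is target-dependent): given
   "p->a == 2 && p->b == 4" with A and B bit-fields of one word, both
   comparisons collapse into a single test along the lines of

     (WORD & (MASK_A | MASK_B)) == ((2 << SHIFT_A) | (4 << SHIFT_B))

   where WORD is one BIT_FIELD_REF spanning both fields.  */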
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (t)
     tree t;
{
  tree type = TREE_TYPE (t);
  tree arg0 = TREE_OPERAND (t, 0);
  enum tree_code op_code;
  tree comp_const = TREE_OPERAND (t, 1);
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return t;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (TREE_CODE (t))
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      return
	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));

    case GE_EXPR:
      return
	fold (build (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (build (EQ_EXPR, type, arg0, comp_const)),
		     optimize_minmax_comparison
		     (build (GT_EXPR, type, arg0, comp_const))));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold (build (LE_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold (build (EQ_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold (build (GE_EXPR, type, inner, comp_const));

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold (build (EQ_EXPR, type, inner, comp_const));

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold (build (GT_EXPR, type, inner, comp_const));

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold (build (GT_EXPR, type, inner, comp_const));

    default:
      abort ();
    }
}
4027 /* T is an integer expression that is being multiplied, divided, or taken a
4028 modulus (CODE says which and what kind of divide or modulus) by a
4029 constant C. See if we can eliminate that operation by folding it with
4030 other operations already in T. WIDE_TYPE, if non-null, is a type that
4031 should be used for the computation if wider than our type.
4033 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4034 (X * 2) + (Y * 4). We must, however, be assured that either the original
4035 expression would not overflow or that overflow is undefined for the type
4036 in the language in question.
4038 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4039 the machine has a multiply-accumulate insn or that this is part of an
4040 addressing calculation.
4042 If we return a non-null expression, it is an equivalent form of the
4043 original computation, but need not be in the original type. */
4046 extract_muldiv (t
, c
, code
, wide_type
)
4049 enum tree_code code
;
4052 tree type
= TREE_TYPE (t
);
4053 enum tree_code tcode
= TREE_CODE (t
);
4054 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
4055 > GET_MODE_SIZE (TYPE_MODE (type
)))
4056 ? wide_type
: type
);
4058 int same_p
= tcode
== code
;
4059 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
4061 /* Don't deal with constants of zero here; they confuse the code below. */
4062 if (integer_zerop (c
))
4065 if (TREE_CODE_CLASS (tcode
) == '1')
4066 op0
= TREE_OPERAND (t
, 0);
4068 if (TREE_CODE_CLASS (tcode
) == '2')
4069 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
4071 /* Note that we need not handle conditional operations here since fold
4072 already handles those cases. So just do arithmetic here. */
4076 /* For a constant, we can always simplify if we are a multiply
4077 or (for divide and modulus) if it is a multiple of our constant. */
4078 if (code
== MULT_EXPR
4079 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
, 0)))
4080 return const_binop (code
, convert (ctype
, t
), convert (ctype
, c
), 0);
    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ...  */
      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or its type is larger than ctype,
		 then we cannot pass through this truncation.  */
	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
				     code == MULT_EXPR ? ctype : NULL_TREE)))
	return t1;
      break;
    case NEGATE_EXPR:  case ABS_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build1 (tcode, ctype, convert (ctype, t1)));
      break;
    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold (build (tcode, ctype, convert (ctype, t1),
			      convert (ctype, t2)));
	}
      break;
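
      /* For illustration: a negative constant reverses order, so the
	 code above must swap MIN and MAX, e.g. MIN (a, b) / -5 becomes
	 MAX (a / -5, b / -5), since a <= b implies a / -5 >= b / -5.  */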
    case WITH_RECORD_EXPR:
      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
		      TREE_OPERAND (t, 1));
      break;
    case SAVE_EXPR:
      /* If this has not been evaluated and the operand has no side effects,
	 we can see if we can do something inside it and make a new one.
	 Note that this test is overly conservative since we can do this
	 if the only reason it had side effects is that it was another
	 similar SAVE_EXPR, but that isn't worth bothering with.  */
      if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
	  && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
					wide_type)))
	{
	  t1 = save_expr (t1);
	  if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
	    SAVE_EXPR_PERSISTENT_P (t1) = 1;
	  if (is_pending_size (t))
	    put_pending_size (t1);
	  return t1;
	}
      break;
    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = convert (ctype,
				 const_binop (LSHIFT_EXPR, size_one_node,
					      op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build (tcode == LSHIFT_EXPR
				      ? MULT_EXPR : FLOOR_DIV_EXPR,
				      ctype, convert (ctype, op0), t1),
			       c, code, wide_type);
      break;
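
      /* For illustration: the case above views (x << 3) / 4 as
	 (x * 8) / 4, which the recursive call reduces to x * 2; the
	 1 << N multiplier is built first so its overflow is checked.  */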
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if either operand
		 is divisible by c.  */
	      || multiple_of_p (ctype, op0, c)
	      || multiple_of_p (ctype, op1, c)))
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, t2)));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
	  if (op1 == 0 || TREE_OVERFLOW (op1))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TREE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold (build (tcode, ctype, convert (ctype, t1), op1));

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold (build (tcode, ctype, fold (build (code, ctype,
						       convert (ctype, op0),
						       convert (ctype, c))),
			    op1));

      break;
    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);
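
      /* For illustration: (x * 8) % 4 matches here since 8 % 4 == 0;
	 every multiple of 8 is a multiple of 4, so the result is the
	 constant zero regardless of X (whose side effects are kept).  */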
      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, t1),
			    convert (ctype, op1)));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold (build (tcode, ctype, convert (ctype, op0),
			    convert (ctype, t1)));
      else if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
				     convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold (build (tcode, ctype, convert (ctype, op0), t1));
      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TREE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold (build (tcode, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      op1, c, 0))));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold (build (code, ctype, convert (ctype, op0),
				convert (ctype,
					 const_binop (TRUNC_DIV_EXPR,
						      c, op1, 0))));
	}
      break;

    default:
      break;
    }

  return 0;
}
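
/* For illustration, the "cancel" rules above in both directions
   (assuming a signed type, where the widening is safe):

     (x * 12) / 4  becomes  x * 3	op1 % c == 0: keep TCODE's multiply
     (x * 4) / 12  becomes  x / 3	c % op1 == 0: switch to CODE's divide

   One constant being a multiple of the other leaves a single multiply
   or divide by the quotient of the two constants.  */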
/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
   S, a SAVE_EXPR, return the expression actually being evaluated.  Note
   that we may sometimes modify the tree.  */

static tree
strip_compound_expr (t, s)
     tree t;
     tree s;
{
  enum tree_code code = TREE_CODE (t);

  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
    return TREE_OPERAND (t, 1);

  /* See if this is a COND_EXPR or a simple arithmetic operator.  We
     don't bother handling any other types.  */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
  else if (TREE_CODE_CLASS (code) == '1')
    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
	   || TREE_CODE_CLASS (code) == '2')
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

static tree
constant_boolean_node (value, type)
     int value;
     tree type;
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (TREE_CODE (type) == BOOLEAN_TYPE)
    return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
						integer_zero_node);
  else
    {
      tree t = build_int_2 (value, 0);

      TREE_TYPE (t) = type;
      return t;
    }
}
/* Utility function for the following routine, to see how complex a nesting of
   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */

static int
count_cond (expr, lim)
     tree expr;
     int lim;
{
  int ctrue, cfalse;

  if (TREE_CODE (expr) != COND_EXPR)
    return 0;
  else if (lim <= 0)
    return 0;

  ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
  cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
  return MIN (lim, 1 + ctrue + cfalse);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is non-zero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
     enum tree_code code;
     tree type;
     tree cond;
     tree arg;
     int cond_first_p;
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;

  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }
  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  lhs_code = COMPOUND_EXPR;
	  if (!cond_first_p)
	    lhs_type = void_type_node;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  rhs_code = COMPOUND_EXPR;
	  if (!cond_first_p)
	    rhs_type = void_type_node;
	}
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }
  /* If ARG is complex we want to make sure we only evaluate
     it once.  Though this is only required if it is volatile, it
     might be more efficient even if it is not.  However, if we
     succeed in folding one part to a constant, we do not need
     to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this
     SAVE_EXPR interferes with later optimizations, suppressing
     it when we can is important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't
     try to do so.  Don't try to see if the result is a constant
     if an arm is a COND_EXPR since we get exponential behavior
     in that case.  */

  if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
      && (*lang_hooks.decls.global_bindings_p) () == 0
      && ((TREE_CODE (arg) != VAR_DECL
	   && TREE_CODE (arg) != PARM_DECL)
	  || TREE_SIDE_EFFECTS (arg)))
    {
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || ! TREE_CONSTANT (rhs)))
	arg = save_expr (arg), lhs = rhs = 0;
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  if (TREE_CODE (arg) == SAVE_EXPR)
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

static bool
fold_real_zero_addition_p (type, addend, negate)
     tree type;
     tree addend;
     int negate;
{
  if (!real_zerop (addend))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
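
/* A worked sketch of the signed-zero reasoning above, with default
   round-to-nearest versus rounding towards -infinity:

     -0.0 + 0.0  ->  +0.0	X + 0 changes X when X is -0.0
     -0.0 - 0.0  ->  -0.0	X - 0 preserves every X ...
      0.0 - 0.0  ->  -0.0	... except when rounding towards
				-infinity, where +0.0 is not preserved

   Hence only the NEGATE form can fold, and only when sign-dependent
   rounding does not have to be honored.  */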
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the C type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

tree
fold (expr)
     tree expr;
{
  tree t = expr;
  tree t1 = NULL_TREE;
  tree tem;
  tree type = TREE_TYPE (expr);
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code code = TREE_CODE (t);
  int kind = TREE_CODE_CLASS (code);
  /* WINS will be nonzero when the switch is done
     if all operands are constant.  */
  int wins = 1;
  /* Don't try to process an RTL_EXPR since its operands aren't trees.
     Likewise for a SAVE_EXPR that's already been evaluated.  */
  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
    return t;

  /* Return right away if a constant.  */
  if (kind == 'c')
    return t;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (expr);
#endif

  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;
      /* Special case for conversion ops that can have fixed point args.  */
      arg0 = TREE_OPERAND (t, 0);

      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
      if (arg0 != 0)
	STRIP_SIGN_NOPS (arg0);

      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
	subop = TREE_REALPART (arg0);
      else
	subop = arg0;

      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
	  && TREE_CODE (subop) != REAL_CST)
	/* Note that TREE_CONSTANT isn't enough:
	   static var addresses are constant but we can't
	   do arithmetic on them.  */
	wins = 0;
    }
  else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
    {
      int len = first_rtl_op (code);
      int i;

      for (i = 0; i < len; i++)
	{
	  tree op = TREE_OPERAND (t, i);
	  tree subop;

	  if (op == 0)
	    continue;		/* Valid for CALL_EXPR, at least.  */

	  if (kind == '<' || code == RSHIFT_EXPR)
	    {
	      /* Signedness matters here.  Perhaps we can refine this
		 later.  */
	      STRIP_SIGN_NOPS (op);
	    }
	  else
	    /* Strip any conversions that don't change the mode.  */
	    STRIP_NOPS (op);

	  if (TREE_CODE (op) == COMPLEX_CST)
	    subop = TREE_REALPART (op);
	  else
	    subop = op;

	  if (TREE_CODE (subop) != INTEGER_CST
	      && TREE_CODE (subop) != REAL_CST)
	    /* Note that TREE_CONSTANT isn't enough:
	       static var addresses are constant but we can't
	       do arithmetic on them.  */
	    wins = 0;

	  if (i == 0)
	    arg0 = op;
	  else if (i == 1)
	    arg1 = op;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
       || code == BIT_AND_EXPR)
      && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
    {
      tem = arg0; arg0 = arg1; arg1 = tem;

      tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
      TREE_OPERAND (t, 1) = tem;
    }
  /* Now WINS is set as described above,
     ARG0 is the first operand of EXPR,
     and ARG1 is the second operand (if it has more than one operand).

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
		       : TRUTH_XOR_EXPR,
		       type, arg0, arg1));

      if (code == EQ_EXPR)
	t = invert_truthvalue (t);

      return t;
    }
  if (TREE_CODE_CLASS (code) == '1')
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			   fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
			   fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));

	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (t) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
		    && (INTEGRAL_TYPE_P
			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
	    t = build1 (code, type,
			build (COND_EXPR,
			       TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (t, 1), 0)),
			       TREE_OPERAND (t, 0),
			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
	  return t;
	}
      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
	return fold (build (COND_EXPR, type, arg0,
			    fold (build1 (code, type, integer_one_node)),
			    fold (build1 (code, type, integer_zero_node))));
    }
  else if (TREE_CODE_CLASS (code) == '2'
	   || TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (arg1) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		      fold (build (code, type,
				   arg0, TREE_OPERAND (arg1, 1))));
      else if ((TREE_CODE (arg1) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg0) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg0)
		   || ((*lang_hooks.decls.global_bindings_p) () == 0
		       && ! contains_placeholder_p (arg0))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
					       /*cond_first_p=*/0);
      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
      else if ((TREE_CODE (arg0) == COND_EXPR
		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
		    && TREE_CODE_CLASS (code) != '<'))
	       && (TREE_CODE (arg1) != COND_EXPR
		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
	       && (! TREE_SIDE_EFFECTS (arg1)
		   || ((*lang_hooks.decls.global_bindings_p) () == 0
		       && ! contains_placeholder_p (arg1))))
	return
	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
					       /*cond_first_p=*/1);
      else if (TREE_CODE_CLASS (code) == '<'
	       && TREE_CODE (arg0) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
      else if (TREE_CODE_CLASS (code) == '<'
	       && TREE_CODE (arg1) == COMPOUND_EXPR)
	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
		      fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
    }
  switch (code)
    {
    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Other kinds of FIX are not handled properly by fold_convert.  */

      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
	return TREE_OPERAND (t, 0);
      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
	  tree final_type = TREE_TYPE (t);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (final_type);
	  int final_ptr = POINTER_TYPE_P (final_type);
	  int final_float = FLOAT_TYPE_P (final_type);
	  unsigned int final_prec = TYPE_PRECISION (final_type);
	  int final_unsignedp = TREE_UNSIGNED (final_type);
	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
	      && ((inter_int && final_int) || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && (inter_prec > inside_prec || inter_prec > final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
	      && ! final_ptr)
	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	}
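
      /* For illustration (assuming 16-bit short, 32-bit int and 64-bit
	 long): the sign-extension rule turns (long) (int) u, with U an
	 unsigned short, into the single zero-extension (long) u, since
	 sign-extending a value that was zero-extended into int can
	 never set the high bits.  */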
      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tree prev = TREE_OPERAND (t, 0);
	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
	  /* First do the assignment, then return converted constant.  */
	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
	  return t;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
	{
	  tree and = TREE_OPERAND (t, 0);
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TREE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (TREE_TYPE (t))
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
		  and0 = convert (uns, and0);
		  and1 = convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
				convert (TREE_TYPE (t), and0),
				convert (TREE_TYPE (t), and1)));
	}
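
      /* For illustration: with X a signed char, (unsigned int) (X & 0x7f)
	 can become (unsigned int) X & 0x7f, since the mask clears the
	 sign bit and above, so widening before or after the AND yields
	 the same bits; the extension may then combine with X's load.  */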
      if (! wins)
	{
	  TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
	  return t;
	}

      return fold_convert (t, arg0);
    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
	return build1 (VIEW_CONVERT_EXPR, type,
		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
      return t;
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
	  if (m)
	    t = TREE_VALUE (m);
	}
      return t;

    case RANGE_EXPR:
      TREE_CONSTANT (t) = wins;
      return t;
    case NEGATE_EXPR:
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT low;
	      HOST_WIDE_INT high;
	      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					 TREE_INT_CST_HIGH (arg0),
					 &low, &high);
	      t = build_int_2 (low, high);
	      TREE_TYPE (t) = type;
	      TREE_OVERFLOW (t)
		= (TREE_OVERFLOW (arg0)
		   | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
	      TREE_CONSTANT_OVERFLOW (t)
		= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	}
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return TREE_OPERAND (arg0, 0);

      /* Convert - (a - b) to (b - a) for non-floating-point.  */
      else if (TREE_CODE (arg0) == MINUS_EXPR
	       && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
	return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
		      TREE_OPERAND (arg0, 0));

      return t;
    case ABS_EXPR:
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      /* If the value is unsigned, then the absolute value is
		 the same as the ordinary value.  */
	      if (TREE_UNSIGNED (type))
		return arg0;
	      /* Similarly, if the value is non-negative.  */
	      else if (INT_CST_LT (integer_minus_one_node, arg0))
		return arg0;
	      /* If the value is negative, then the absolute value is
		 its negation.  */
	      else
		{
		  unsigned HOST_WIDE_INT low;
		  HOST_WIDE_INT high;
		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					     TREE_INT_CST_HIGH (arg0),
					     &low, &high);
		  t = build_int_2 (low, high);
		  TREE_TYPE (t) = type;
		  TREE_OVERFLOW (t)
		    = (TREE_OVERFLOW (arg0)
		       | force_fit_type (t, overflow));
		  TREE_CONSTANT_OVERFLOW (t)
		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
		}
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    {
	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
		t = build_real (type,
				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	    }
	}
      else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
	return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));

      return t;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return convert (type, arg0);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build (COMPLEX_EXPR, type,
		      TREE_OPERAND (arg0, 0),
		      negate_expr (TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_REALPART (arg0),
			      negate_expr (TREE_IMAGPART (arg0)));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (CONJ_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (CONJ_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case BIT_NOT_EXPR:
      if (wins)
	{
	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
			   ~ TREE_INT_CST_HIGH (arg0));
	  TREE_TYPE (t) = type;
	  force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
      else if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if ((TREE_CODE (arg0) == PLUS_EXPR
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || (TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;

	      if (TREE_CODE (arg0) == PLUS_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type, parg0, marg)),
				    parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return fold (build (PLUS_EXPR, type,
				    fold (build (PLUS_EXPR, type, parg1, marg)),
				    parg0));
	    }

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      /* No identical multiplicands; see if we can find a common
		 power-of-two factor in non-power-of-two multiplies.  This
		 can help in multi-dimensional array access.  */
	      else if (TREE_CODE (arg01) == INTEGER_CST
		       && TREE_CODE (arg11) == INTEGER_CST
		       && TREE_INT_CST_HIGH (arg01) == 0
		       && TREE_INT_CST_HIGH (arg11) == 0)
		{
		  HOST_WIDE_INT int01, int11, tmp;
		  int01 = TREE_INT_CST_LOW (arg01);
		  int11 = TREE_INT_CST_LOW (arg11);

		  /* Move min of absolute values to int11.  */
		  if ((int01 >= 0 ? int01 : -int01)
		      < (int11 >= 0 ? int11 : -int11))
		    {
		      tmp = int01, int01 = int11, int11 = tmp;
		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
		    }

		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
		    {
		      alt0 = fold (build (MULT_EXPR, type, arg00,
					  build_int_2 (int01 / int11, 0)));
		      alt1 = arg10;
		      same = arg11;
		    }
		}

	      if (same)
		return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type, alt0, alt1)),
				    same));
	    }
	}
      /* See if ARG1 is zero and X + ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	return non_lvalue (convert (type, arg0));

      /* Likewise if the operands are reversed.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return non_lvalue (convert (type, arg1));

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
			    code0 == LSHIFT_EXPR ? tree01 : tree11);
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return build ((code0 == LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree01);
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return build ((code0 != LSHIFT_EXPR
				 ? LROTATE_EXPR
				 : RROTATE_EXPR),
				type, TREE_OPERAND (arg0, 0), tree11);
	      }
	  }
      }
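
      /* For illustration, the idioms recognized above (hypothetical user
	 code, assuming a 32-bit unsigned int):

	   unsigned rotl3 (unsigned x) { return (x << 3) + (x >> 29); }
	   unsigned rotl (unsigned x, int b)
	     { return (x << b) + (x >> (32 - b)); }

	 Each folds to a single LROTATE_EXPR of X, by 3 and by B, since
	 3 + 29 and B + (32 - B) both equal the width of the type.  */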
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all.  It shouldn't matter much.  However,
	 associating multiplications is only very slightly inaccurate, so do
	 that if -funsafe-math-optimizations is specified.  */

      if (! wins
	  && (! FLOAT_TYPE_P (type)
	      || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (2 < ((var0 != 0) + (var1 != 0)
		   + (con0 != 0) + (con1 != 0)
		   + (lit0 != 0) + (lit1 != 0)
		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
	    {
	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      var0 = associate_trees (var0, var1, code, type);
	      con0 = associate_trees (con0, con1, code, type);
	      lit0 = associate_trees (lit0, lit1, code, type);
	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return convert (type, associate_trees (var0, minus_lit0,
							   MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (con0, minus_lit0,
					      MINUS_EXPR, type);
		      return convert (type, associate_trees (var0, con0,
							     MINUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (con0, lit0, code, type);
	      return convert (type, associate_trees (var0, con0, code, type));
	    }
	}
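
      /* For illustration: folding (x + 1) + 2 splits both operands into
	 variable and literal parts (x; 1 and 2), combines the literals,
	 and yields x + 3.  The MINUS_EXPR handling above keeps
	 ((x*2 + 4) - 8U)/2 as a subtraction of the literal 4 instead of
	 an addition of a wrapped unsigned constant.  */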
    binary:
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;
    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - CST -> (-CST) - A for floating point (what about ints ?)  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
	return
	  fold (build (MINUS_EXPR, type,
		       build_real (TREE_TYPE (arg1),
				   REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
		       TREE_OPERAND (arg0, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return negate_expr (convert (type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue (convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && operand_equal_p (arg0, arg1, 0))
	return convert (type, integer_zero_node);

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));

	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					     code, NULL_TREE)))
	    return convert (type, tem);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold (build1 (NEGATE_EXPR, type, arg0));

	  /* x * 2 is x + x.  */
	  if (! wins && real_twop (arg1)
	      && (*lang_hooks.decls.global_bindings_p) () == 0
	      && ! contains_placeholder_p (arg0))
	    {
	      tree arg = save_expr (arg0);
	      return build (PLUS_EXPR, type, arg, arg);
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_AND_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1), 0)))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_AND_EXPR:
    bit_and:
      if (integer_all_onesp (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold (build1 (BIT_NOT_EXPR, type,
			       build (BIT_IOR_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0))));
	}

      goto associate;
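
      /* For illustration, this rewrite and its BIT_IOR_EXPR twin are the
	 two De Morgan forms:

	   (~a) & (~b)  ->  ~(a | b)
	   (~a) | (~b)  ->  ~(a & b)

	 so combine can later select a single NOR or NAND instruction
	 where the target provides one.  */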
    case BIT_ANDTC_EXPR:
      if (integer_all_onesp (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
	  code = BIT_AND_EXPR;
	  goto bit_and;
	}
      goto binary;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue (convert (type, arg0));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -funsafe-math-optimizations.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_unsafe_math_optimizations
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  else if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold (build (MULT_EXPR, type, arg0, tem));
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg0) == RDIV_EXPR)
	{
	  return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			      build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
				     arg1)));
	}
      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == RDIV_EXPR)
	{
	  return fold (build (MULT_EXPR, type,
			      build (RDIV_EXPR, type, arg0,
				     TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg1, 1)));
	}
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
					 code, NULL_TREE)))
	return convert (type, tem);

      goto binary;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  TREE_SET_CODE (t, RROTATE_EXPR);
	  code = RROTATE_EXPR;
	  TREE_OPERAND (t, 1) = arg1
	    = const_binop
	      (MINUS_EXPR,
	       convert (TREE_TYPE (arg1),
			build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
	       arg1, 0);
	  if (tree_int_cst_sgn (arg1) < 0)
	    return t;
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_ANDTC_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;
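
      /* For illustration (assuming a 32-bit type): the permutation rule
	 turns ((x & 0xff00ff00) ror 8) into ((x ror 8) & (0xff00ff00
	 ror 8)), since rotation distributes over bitwise operations,
	 and the last rule folds ((x ror 24) ror 8) to plain x because
	 24 + 8 equals the width.  */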
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	return t;
      return convert (type, tem);
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return t;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a11))));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold (build (TREE_CODE (arg0), type, a00,
				fold (build (code, type, a01, a10))));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold (build (TREE_CODE (arg0), type, a01,
				fold (build (code, type, a00, a11))));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold (build (TREE_CODE (arg0), type,
				fold (build (code, type, a00, a10)),
				a01));
	}
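      /* Editorial illustration (not from the original source): with
	 a00 == a10 the factoring above rewrites

	     (a || b) && (a || c)    as    a || (b && c)

	 so `a' is tested only once; the side-effect checks guard the
	 reordering of `b' and `c'.  */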
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
	return tem;

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
	return tem;

      return t;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue (convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
	return non_lvalue (convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
	return non_lvalue (convert (type, invert_truthvalue (arg0)));
      return t;
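      /* Editorial illustration (not from the original source): on truth
	 values `a ^ 0' folds to `a' and `a ^ 1' folds to `!a', the
	 logical inversion produced by invert_truthvalue above.  */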
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (arg0) == INTEGER_CST
	   && TREE_CODE (arg1) != INTEGER_CST)
	  || (TREE_CODE (arg0) == REAL_CST
	      && TREE_CODE (arg1) != REAL_CST))
	{
	  TREE_OPERAND (t, 0) = arg1;
	  TREE_OPERAND (t, 1) = arg0;
	  arg0 = TREE_OPERAND (t, 0);
	  arg1 = TREE_OPERAND (t, 1);
	  code = swap_tree_comparison (code);
	  TREE_SET_CODE (t, code);
	}
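      /* Editorial illustration (not from the original source): the swap
	 above canonicalizes `5 < x' into `x > 5', so every later case
	 may assume that a constant operand, if any, is ARG1.  (The
	 second test originally compared ARG0 against itself, which is
	 always false; it clearly meant to test ARG1.)  */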
      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (-a) CMP (-b) -> b CMP a  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0)));
	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
	    return
	      fold (build
		    (swap_tree_comparison (code), type,
		     TREE_OPERAND (arg0, 0),
		     build_real (TREE_TYPE (arg1),
				 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
	    return fold (build (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0)));
	  /* If this is a comparison of a real constant with a PLUS_EXPR
	     or a MINUS_EXPR of a real constant, we can convert it into a
	     comparison with a revised real constant as long as no overflow
	     occurs when unsafe_math_optimizations are enabled.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == REAL_CST
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
					  ? MINUS_EXPR : PLUS_EXPR,
					  arg1, TREE_OPERAND (arg0, 1), 0))
	      && ! TREE_CONSTANT_OVERFLOW (tem))
	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
	}
      /* Convert foo++ == CONST into ++foo == CONST + INCR.
	 First, see if one arg is constant; find the constant arg
	 and the other one.  */
      {
	tree constop = 0, varop = NULL_TREE;
	int constopnum = -1;

	if (TREE_CONSTANT (arg1))
	  constopnum = 1, constop = arg1, varop = arg0;
	if (TREE_CONSTANT (arg0))
	  constopnum = 0, constop = arg0, varop = arg1;

	if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
	  {
	    /* This optimization is invalid for ordered comparisons
	       if CONST+INCR overflows or if foo+incr might overflow.
	       This optimization is invalid for floating point due to rounding.
	       For pointer types we assume overflow doesn't happen.  */
	    if (POINTER_TYPE_P (TREE_TYPE (varop))
		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
		    && (code == EQ_EXPR || code == NE_EXPR)))
	      {
		tree newconst
		  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
				 constop, TREE_OPERAND (varop, 1)));

		/* Do not overwrite the current varop to be a preincrement,
		   create a new node so that we won't confuse our caller who
		   might create trees and throw them away, reusing the
		   arguments that they passed to build.  This shows up in
		   the THEN or ELSE parts of ?: being postincrements.  */
		varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
			       TREE_OPERAND (varop, 0),
			       TREE_OPERAND (varop, 1));

		/* If VAROP is a reference to a bitfield, we must mask
		   the constant by the width of the field.  */
		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
		    && DECL_BIT_FIELD (TREE_OPERAND
				       (TREE_OPERAND (varop, 0), 1)))
		  {
		    int size
		      = TREE_INT_CST_LOW (DECL_SIZE
					  (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)));
		    tree mask, unsigned_type;
		    unsigned int precision;
		    tree folded_compare;

		    /* First check whether the comparison would come out
		       always the same.  If we don't do that we would
		       change the meaning with the masking.  */
		    if (constopnum == 0)
		      folded_compare = fold (build (code, type, constop,
						    TREE_OPERAND (varop, 0)));
		    else
		      folded_compare = fold (build (code, type,
						    TREE_OPERAND (varop, 0),
						    constop));
		    if (integer_zerop (folded_compare)
			|| integer_onep (folded_compare))
		      return omit_one_operand (type, folded_compare, varop);

		    unsigned_type
		      = (*lang_hooks.types.type_for_size) (size, 1);
		    precision = TYPE_PRECISION (unsigned_type);
		    mask = build_int_2 (~0, ~0);
		    TREE_TYPE (mask) = unsigned_type;
		    force_fit_type (mask, 0);
		    mask = const_binop (RSHIFT_EXPR, mask,
					size_int (precision - size), 0);
		    newconst = fold (build (BIT_AND_EXPR,
					    TREE_TYPE (varop), newconst,
					    convert (TREE_TYPE (varop),
						     mask)));
		  }

		t = build (code, type,
			   (constopnum == 0) ? newconst : varop,
			   (constopnum == 1) ? newconst : varop);
		return t;
	      }
	  }
	else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
	  {
	    if (POINTER_TYPE_P (TREE_TYPE (varop))
		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
		    && (code == EQ_EXPR || code == NE_EXPR)))
	      {
		tree newconst
		  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
				 constop, TREE_OPERAND (varop, 1)));

		/* Do not overwrite the current varop to be a predecrement,
		   create a new node so that we won't confuse our caller who
		   might create trees and throw them away, reusing the
		   arguments that they passed to build.  This shows up in
		   the THEN or ELSE parts of ?: being postdecrements.  */
		varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
			       TREE_OPERAND (varop, 0),
			       TREE_OPERAND (varop, 1));

		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
		    && DECL_BIT_FIELD (TREE_OPERAND
				       (TREE_OPERAND (varop, 0), 1)))
		  {
		    int size
		      = TREE_INT_CST_LOW (DECL_SIZE
					  (TREE_OPERAND
					   (TREE_OPERAND (varop, 0), 1)));
		    tree mask, unsigned_type;
		    unsigned int precision;
		    tree folded_compare;

		    if (constopnum == 0)
		      folded_compare = fold (build (code, type, constop,
						    TREE_OPERAND (varop, 0)));
		    else
		      folded_compare = fold (build (code, type,
						    TREE_OPERAND (varop, 0),
						    constop));
		    if (integer_zerop (folded_compare)
			|| integer_onep (folded_compare))
		      return omit_one_operand (type, folded_compare, varop);

		    unsigned_type
		      = (*lang_hooks.types.type_for_size) (size, 1);
		    precision = TYPE_PRECISION (unsigned_type);
		    mask = build_int_2 (~0, ~0);
		    TREE_TYPE (mask) = TREE_TYPE (varop);
		    force_fit_type (mask, 0);
		    mask = const_binop (RSHIFT_EXPR, mask,
					size_int (precision - size), 0);
		    newconst = fold (build (BIT_AND_EXPR,
					    TREE_TYPE (varop), newconst,
					    convert (TREE_TYPE (varop),
						     mask)));
		  }

		t = build (code, type,
			   (constopnum == 0) ? newconst : varop,
			   (constopnum == 1) ? newconst : varop);
		return t;
	      }
	  }
      }
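      /* Editorial illustration (not from the original source): for
	 pointers, or for EQ/NE on integers, the block above rewrites

	     foo++ == CONST    as    ++foo == CONST + INCR

	 with CONST + INCR folded to a new constant, exposing the
	 incremented value to the remaining comparison folders.  */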
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
	 This transformation affects the cases which are handled in later
	 optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) != INTEGER_CST
	  && tree_int_cst_sgn (arg1) > 0)
	{
	  switch (code)
	    {
	    case GE_EXPR:
	      code = GT_EXPR;
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
	      break;

	    case LT_EXPR:
	      code = LE_EXPR;
	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
	      break;

	    default:
	      break;
	    }
	}
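      /* Editorial illustration (not from the original source): for a
	 positive constant, `x >= 3' becomes `x > 2' and `x < 3'
	 becomes `x <= 2', the canonical forms assumed by the
	 known-value comparisons that follow.  */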
      /* Comparisons with the highest or lowest possible integer of
	 the specified size will have known values.  */
      {
	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

	if (TREE_CODE (arg1) == INTEGER_CST
	    && ! TREE_CONSTANT_OVERFLOW (arg1)
	    && width <= HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
	  {
	    unsigned HOST_WIDE_INT signed_max;
	    unsigned HOST_WIDE_INT max, min;

	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
	      {
		max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		min = 0;
	      }
	    else
	      {
		max = signed_max;
		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
	      }

	    if (TREE_INT_CST_HIGH (arg1) == 0
		&& TREE_INT_CST_LOW (arg1) == max)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_zero_node),
					   arg0);
		case GE_EXPR:
		  TREE_SET_CODE (t, EQ_EXPR);
		  break;
		case LE_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_one_node),
					   arg0);
		case LT_EXPR:
		  TREE_SET_CODE (t, NE_EXPR);
		  break;

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == max - 1)
	      switch (code)
		{
		case GT_EXPR:
		  code = EQ_EXPR;
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		case LE_EXPR:
		  code = NE_EXPR;
		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_zero_node),
					   arg0);
		case LE_EXPR:
		  TREE_SET_CODE (t, EQ_EXPR);
		  break;
		case GE_EXPR:
		  return omit_one_operand (type,
					   convert (type, integer_one_node),
					   arg0);
		case GT_EXPR:
		  TREE_SET_CODE (t, NE_EXPR);
		  break;
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
		     && TREE_INT_CST_LOW (arg1) == min + 1)
	      switch (code)
		{
		case GE_EXPR:
		  code = NE_EXPR;
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		case LT_EXPR:
		  code = EQ_EXPR;
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
		  t = build (code, type, TREE_OPERAND (t, 0), arg1);
		  break;
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == 0
		     && TREE_INT_CST_LOW (arg1) == signed_max
		     && TREE_UNSIGNED (TREE_TYPE (arg1))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st0, st1;
		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
		    return fold
		      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
			      type, convert (st0, arg0),
			      convert (st1, integer_zero_node)));
		  }
	      }
	  }
      }
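      /* Editorial illustration (not from the original source): for a
	 32-bit unsigned x, `x > 0xffffffff' folds to 0 and
	 `x <= 0xffffffff' folds to 1, while `x <= 0x7fffffff' becomes
	 `(int) x >= 0' through the signed_type conversion above.  */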
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
	       && TREE_CODE (arg0) == NEGATE_EXPR
	       && TREE_CODE (arg1) == INTEGER_CST
	       && 0 != (tem = negate_expr (arg1))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
	       && TREE_CODE (arg0) == NOP_EXPR
	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
		   || (TREE_CODE (t1) == INTEGER_CST
		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
	return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
	 constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && (TREE_CODE (arg0) == MIN_EXPR
		   || TREE_CODE (arg0) == MAX_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return optimize_minmax_comparison (t);
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	       && TREE_CODE (arg0) == ABS_EXPR
	       && ! TREE_SIDE_EFFECTS (arg0)
	       && (0 != (tem = negate_expr (arg1)))
	       && TREE_CODE (tem) == INTEGER_CST
	       && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold (build (TRUTH_ANDIF_EXPR, type,
			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
			    build (LE_EXPR, type,
				   TREE_OPERAND (arg0, 0), arg1)));
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR)
	{
	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND
					 (TREE_OPERAND (arg0, 0), 1)),
				  convert (TREE_TYPE (arg0),
					   integer_one_node)),
			   arg1));
	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
	    return
	      fold (build (code, type,
			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
				  build (RSHIFT_EXPR,
					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
					 TREE_OPERAND (arg0, 0),
					 TREE_OPERAND
					 (TREE_OPERAND (arg0, 1), 1)),
				  convert (TREE_TYPE (arg0),
					   integer_one_node)),
			   arg1));
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
	  && integer_zerop (arg1)
	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
	  tree newmod = build (TREE_CODE (arg0), newtype,
			       convert (newtype, TREE_OPERAND (arg0, 0)),
			       convert (newtype, TREE_OPERAND (arg0, 1)));

	  return build (code, type, newmod, convert (newtype, arg1));
	}
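      /* Editorial illustration (not from the original source): for
	 signed x, `x % 4 != 0' becomes `(unsigned) x % 4U != 0'; the
	 two agree for every x because a power-of-2 remainder is zero
	 exactly when the low bits are zero, and the unsigned form can
	 be expanded as a simple mask.  */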
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return convert (type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, integer_zero_node));
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg0, 1));
	  if (arg00 != NULL_TREE)
	    {
	      tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
	      return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				  convert (stype, arg00),
				  convert (stype, integer_zero_node)));
	    }
	}
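      /* Editorial illustration (not from the original source): for a
	 32-bit x, `(x & 0x80000000) != 0' folds to `(int) x < 0' and
	 `(x & 0x80000000) == 0' folds to `(int) x >= 0'.  */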
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TREE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			     TREE_OPERAND (arg1, 1)),
		      convert (TREE_TYPE (arg0), integer_zero_node));
      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TREE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		 convert (TREE_TYPE (arg0),
			  build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
		 convert (TREE_TYPE (arg0), integer_zero_node));
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
		return constant_boolean_node (1, type);
	      code = EQ_EXPR;
	      TREE_SET_CODE (t, code);
	      break;

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      abort ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, minval),
			       arg1));
	      tree equal_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, maxval),
			       arg1));
	      tree low_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, minval, cval2, maxval),
			       arg1));

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1:
		      code = LT_EXPR;
		      break;
		    case 2:
		      code = EQ_EXPR;
		      break;
		    case 3:
		      code = LE_EXPR;
		      break;
		    case 4:
		      code = GT_EXPR;
		      break;
		    case 5:
		      code = NE_EXPR;
		      break;
		    case 6:
		      code = GE_EXPR;
		      break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  t = build (code, type, cval1, cval2);
		  if (save_p)
		    return save_expr (t);
		  else
		    return fold (t);
		}
	    }
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && (*lang_hooks.can_use_bit_fields_p) ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* If this is a comparison of complex values and either or both sides
	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
	 This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
	  && (TREE_CODE (arg0) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg0) == COMPLEX_CST
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree real0, imag0, real1, imag1;

	  arg0 = save_expr (arg0);
	  arg1 = save_expr (arg1);
	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
			       : TRUTH_ORIF_EXPR),
			      type,
			      fold (build (code, type, real0, real1)),
			      fold (build (code, type, imag0, imag1))));
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tree arglist;

	  if (TREE_CODE (fndecl) == FUNCTION_DECL
	      && DECL_BUILT_IN (fndecl)
	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    return fold (build (code, type,
				build1 (INDIRECT_REF, char_type_node,
					TREE_VALUE (arglist)),
				integer_zero_node));
	}
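      /* Editorial illustration (not from the original source):
	 `strlen (p) == 0' folds to `*p == 0', avoiding a scan of the
	 whole string; per the comment above, a test like
	 `strlen (p) > 0' should already have been reduced to
	 `strlen (p) != 0' (strlen returns an unsigned value) and so
	 ends up as `*p != 0'.  */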
      /* From here on, the only cases we handle are when the result is
	 known to be a constant.

	 To compute GT, swap the arguments and do LT.
	 To compute GE, do LT and invert the result.
	 To compute LE, swap the arguments, do LT and invert the result.
	 To compute NE, do EQ and invert the result.

	 Therefore, the code below must handle only EQ and LT.  */

      if (code == LE_EXPR || code == GT_EXPR)
	{
	  tem = arg0, arg0 = arg1, arg1 = tem;
	  code = swap_tree_comparison (code);
	}

      /* Note that it is safe to invert for real values here because we
	 will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
	{
	  invert = 1;
	  code = invert_tree_comparison (code);
	}
      /* Compute a result for LT or EQ if args permit;
	 otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	{
	  if (code == EQ_EXPR)
	    t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
	  else
	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
			       : INT_CST_LT (arg0, arg1)),
			      0);
	}
#if 0  /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
	 since such code would be undefined anyway.
	 Exception: on sysvr4, using #pragma weak,
	 a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && !integer_zerop (arg1)
	       && TREE_CONSTANT (arg0)
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && code == EQ_EXPR)
	t1 = build_int_2 (0, 0);
#endif
      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  /* If either operand is a NaN, the result is false with two
	     exceptions: First, an NE_EXPR is true on NaNs, but that case
	     is already handled correctly since we will be inverting the
	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
	     or a GE_EXPR into a LT_EXPR, we must return true so that it
	     will be inverted into false.  */

	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    t1 = build_int_2 (invert && code == LT_EXPR, 0);

	  else if (code == EQ_EXPR)
	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
						 TREE_REAL_CST (arg1)),
			      0);
	  else
	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
						TREE_REAL_CST (arg1)),
			      0);
	}

      if (t1 == NULL_TREE)
	return t;

      if (invert)
	TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return (*lang_hooks.truthvalue_conversion) (t1);
      return t1;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	return pedantic_non_lvalue
	  (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
      else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If the second operand is zero, invert the comparison and swap
	 the second and third operands.  Likewise if the second operand
	 is constant and the third is not or if the third operand is
	 equivalent to the first operand of the comparison.  */

      if (integer_zerop (arg1)
	  || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
	  || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
	      && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
						 TREE_OPERAND (t, 2),
						 TREE_OPERAND (arg0, 1))))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    {
	      t = build (code, type, tem,
			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
	      arg0 = tem;
	      /* arg1 should be the first argument of the new T.  */
	      arg1 = TREE_OPERAND (t, 1);
	      STRIP_NOPS (arg1);
	    }
	}
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tree arg2 = TREE_OPERAND (t, 2);
	  enum tree_code comp_code = TREE_CODE (arg0);

	  STRIP_NOPS (arg2);
	  /* If we have A op 0 ? A : -A, consider applying the following
	     transformations:

	     A == 0? A : -A    same as -A
	     A != 0? A : -A    same as A
	     A >= 0? A : -A    same as abs (A)
	     A > 0?  A : -A    same as abs (A)
	     A <= 0? A : -A    same as -abs (A)
	     A < 0?  A : -A    same as -abs (A)

	     None of these transformations work for modes with signed
	     zeros.  If A is +/-0, the first two transformations will
	     change the sign of the result (from +0 to -0, or vice
	     versa).  The last four will fix the sign of the result,
	     even though the original expressions could be positive or
	     negative, depending on the sign of A.

	     Note that all these transformations are correct if A is
	     NaN, since the two alternatives (A and -A) are also NaNs.  */
	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
	       ? real_zerop (TREE_OPERAND (arg0, 1))
	       : integer_zerop (TREE_OPERAND (arg0, 1)))
	      && TREE_CODE (arg2) == NEGATE_EXPR
	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		return pedantic_non_lvalue
		  (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
			    negate_expr (arg1)));
	      case NE_EXPR:
		return pedantic_non_lvalue (convert (type, arg1));
	      case GE_EXPR:
	      case GT_EXPR:
		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = convert ((*lang_hooks.types.signed_type)
				  (TREE_TYPE (arg1)), arg1);
		return pedantic_non_lvalue
		  (convert (type, fold (build1 (ABS_EXPR,
						TREE_TYPE (arg1), arg1))));
	      case LE_EXPR:
	      case LT_EXPR:
		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = convert ((*lang_hooks.types.signed_type)
				  (TREE_TYPE (arg1)), arg1);
		return pedantic_non_lvalue
		  (negate_expr (convert (type,
					 fold (build1 (ABS_EXPR,
						       TREE_TYPE (arg1),
						       arg1)))));
	      default:
		abort ();
	      }
	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
	     both transformations are correct when A is NaN: A != 0
	     is then true, and A == 0 is false.  */

	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
	    {
	      if (comp_code == NE_EXPR)
		return pedantic_non_lvalue (convert (type, arg1));
	      else if (comp_code == EQ_EXPR)
		return pedantic_non_lvalue (convert (type, integer_zero_node));
	    }
	  /* Try some transformations of A op B ? A : B.

	     A == B? A : B    same as B
	     A != B? A : B    same as A
	     A >= B? A : B    same as max (A, B)
	     A > B?  A : B    same as max (B, A)
	     A <= B? A : B    same as min (A, B)
	     A < B?  A : B    same as min (B, A)

	     As above, these transformations don't work in the presence
	     of signed zeros.  For example, if A and B are zeros of
	     opposite sign, the first two transformations will change
	     the sign of the result.  In the last four, the original
	     expressions give different results for (A=+0, B=-0) and
	     (A=-0, B=+0), but the transformed expressions do not.

	     The first two transformations are correct if either A or B
	     is a NaN.  In the first transformation, the condition will
	     be false, and B will indeed be chosen.  In the case of the
	     second transformation, the condition A != B will be true,
	     and A will be chosen.

	     The conversions to max() and min() are not correct if B is
	     a number and A is not.  The conditions in the original
	     expressions will be false, so all four give B.  The min()
	     and max() versions would give a NaN instead.  */
	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
					      arg2, TREE_OPERAND (arg0, 0)))
	    {
	      tree comp_op0 = TREE_OPERAND (arg0, 0);
	      tree comp_op1 = TREE_OPERAND (arg0, 1);
	      tree comp_type = TREE_TYPE (comp_op0);

	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
		comp_type = type;

	      switch (comp_code)
		{
		case EQ_EXPR:
		  return pedantic_non_lvalue (convert (type, arg2));
		case NE_EXPR:
		  return pedantic_non_lvalue (convert (type, arg1));
		case LE_EXPR:
		case LT_EXPR:
		  /* In C++ a ?: expression can be an lvalue, so put the
		     operand which will be used if they are equal first
		     so that we can convert this back to the
		     corresponding COND_EXPR.  */
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue
		      (convert (type, fold (build (MIN_EXPR, comp_type,
						   (comp_code == LE_EXPR
						    ? comp_op0 : comp_op1),
						   (comp_code == LE_EXPR
						    ? comp_op1 : comp_op0)))));
		  break;
		case GE_EXPR:
		case GT_EXPR:
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue
		      (convert (type, fold (build (MAX_EXPR, comp_type,
						   (comp_code == GE_EXPR
						    ? comp_op0 : comp_op1),
						   (comp_code == GE_EXPR
						    ? comp_op1 : comp_op0)))));
		  break;
		default:
		  abort ();
		}
	    }
	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
	     we might still be able to simplify this.  For example,
	     if C1 is one less or one more than C2, this might have started
	     out as a MIN or MAX and been transformed by this function.
	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

	  if (INTEGRAL_TYPE_P (type)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST)
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		/* We can replace A with C1 in this case.  */
		arg1 = convert (type, TREE_OPERAND (arg0, 1));
		t = build (code, type, TREE_OPERAND (t, 0), arg1,
			   TREE_OPERAND (t, 2));
		break;

	      case LT_EXPR:
		/* If C1 is C2 + 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MIN_EXPR, type, arg1, arg2)));
		break;

	      case LE_EXPR:
		/* If C1 is C2 - 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MIN_EXPR, type, arg1, arg2)));
		break;

	      case GT_EXPR:
		/* If C1 is C2 - 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MAX_EXPR, type, arg1, arg2)));
		break;

	      case GE_EXPR:
		/* If C1 is C2 + 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MAX_EXPR, type, arg1, arg2)));
		break;

	      case NE_EXPR:
		break;

	      default:
		abort ();
	      }
	}
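      /* Editorial illustration (not from the original source): with
	 C1 == C2 + 1, `a < 4 ? a : 3' is recognized above as
	 MIN_EXPR <a, 3>, recovering a MIN that an earlier transform
	 had rewritten into a COND_EXPR.  */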
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
	   || TREE_CODE (arg1) == SAVE_EXPR)
	  && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
		|| DECL_P (TREE_OPERAND (t, 2))
		|| TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    {
	      t = build (code, type, tem,
			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
	      arg0 = tem;
	      /* arg1 should be the first argument of the new T.  */
	      arg1 = TREE_OPERAND (t, 1);
	      STRIP_NOPS (arg1);
	    }
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
	  && integer_zerop (TREE_OPERAND (t, 2))
	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
	  && integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (convert (type,
					     invert_truthvalue (arg0)));
      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
	 operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, 1))
	return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
						 arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
						     tem, arg1)));
	}

      return t;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
	return t;
      /* Don't let (0, 0) be null pointer constant.  */
      if (integer_zerop (arg1))
	return build1 (NOP_EXPR, type, arg1);
      return convert (type, arg1);

    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, arg0, arg1);
      return t;

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (REALPART_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (REALPART_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (IMAGPART_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (IMAGPART_EXPR, type,
					  TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
	 appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
	return TREE_OPERAND (t, 0);

      {
	enum tree_code code0 = TREE_CODE (arg0);
	int kind0 = TREE_CODE_CLASS (code0);
	tree arg00 = TREE_OPERAND (arg0, 0);
	tree arg01;

	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
	  return fold (build1 (code0, type,
			       fold (build1 (CLEANUP_POINT_EXPR,
					     TREE_TYPE (arg00), arg00))));

	if (kind0 == '<' || kind0 == '2'
	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
	    || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
	    || code0 == TRUTH_XOR_EXPR)
	  {
	    arg01 = TREE_OPERAND (arg0, 1);

	    if (TREE_CONSTANT (arg00)
		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
		    && ! has_cleanups (arg00)))
	      return fold (build (code0, type, arg00,
				  fold (build1 (CLEANUP_POINT_EXPR,
						TREE_TYPE (arg01), arg01))));

	    if (TREE_CONSTANT (arg01))
	      return fold (build (code0, type,
				  fold (build1 (CLEANUP_POINT_EXPR,
						TREE_TYPE (arg00), arg00)),
				  arg01));
	  }

	return t;
      }

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	{
	  tree tmp = fold_builtin (expr);
	  if (tmp)
	    return tmp;
	}
      return t;

    default:
      return t;
    } /* switch (code) */
}
/* Determine if first argument is a multiple of second argument.  Return 0
   if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (type, top, bottom)
     tree type;
     tree top;
     tree bottom;
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = convert (type,
				     const_binop (LSHIFT_EXPR, size_one_node,
						  op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
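/* Editorial illustration (not from the original source): for
   TOP = SAVE_EXPR (i) * SAVE_EXPR (j * 8) and BOTTOM = 8, multiple_of_p
   succeeds through the MULT_EXPR case: the SAVE_EXPR case looks inside
   the second factor, whose own MULT_EXPR operand 8 equals BOTTOM.  */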
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
    case FFS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
      else
	/* We don't know sign of `t', so be conservative and return false.  */
	return 0;
    }
}
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (r)
     rtx r;
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"