/* gcc/fold-const.c -- constant folding for GCC trees.
   (Snapshot associated with the fix for PR rtl-optimization/9258,
   "ICE in compensate_edge, at reg-stack.c:2589".)  */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
60 static void encode PARAMS ((HOST_WIDE_INT *,
61 unsigned HOST_WIDE_INT,
62 HOST_WIDE_INT));
63 static void decode PARAMS ((HOST_WIDE_INT *,
64 unsigned HOST_WIDE_INT *,
65 HOST_WIDE_INT *));
66 static tree negate_expr PARAMS ((tree));
67 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
68 tree *, int));
69 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
70 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
71 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
72 static hashval_t size_htab_hash PARAMS ((const void *));
73 static int size_htab_eq PARAMS ((const void *, const void *));
74 static tree fold_convert PARAMS ((tree, tree));
75 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
76 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
77 static int comparison_to_compcode PARAMS ((enum tree_code));
78 static enum tree_code compcode_to_comparison PARAMS ((int));
79 static int truth_value_p PARAMS ((enum tree_code));
80 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
81 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
82 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
83 static tree omit_one_operand PARAMS ((tree, tree, tree));
84 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
85 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
86 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
87 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
88 tree, tree));
89 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
90 HOST_WIDE_INT *,
91 enum machine_mode *, int *,
92 int *, tree *, tree *));
93 static int all_ones_mask_p PARAMS ((tree, int));
94 static tree sign_bit_p PARAMS ((tree, tree));
95 static int simple_operand_p PARAMS ((tree));
96 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
97 tree, int));
98 static tree make_range PARAMS ((tree, int *, tree *, tree *));
99 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
100 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
101 int, tree, tree));
102 static tree fold_range_test PARAMS ((tree));
103 static tree unextend PARAMS ((tree, int, int, tree));
104 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
105 static tree optimize_minmax_comparison PARAMS ((tree));
106 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
107 static tree strip_compound_expr PARAMS ((tree, tree));
108 static int multiple_of_p PARAMS ((tree, tree, tree));
109 static tree constant_boolean_node PARAMS ((int, tree));
110 static int count_cond PARAMS ((tree, int));
111 static tree fold_binary_op_with_conditional_arg
112 PARAMS ((enum tree_code, tree, tree, tree, int));
113 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
115 /* The following constants represent a bit based encoding of GCC's
116 comparison operators. This encoding simplifies transformations
117 on relational comparison operators, such as AND and OR. */
118 #define COMPCODE_FALSE 0
119 #define COMPCODE_LT 1
120 #define COMPCODE_EQ 2
121 #define COMPCODE_LE 3
122 #define COMPCODE_GT 4
123 #define COMPCODE_NE 5
124 #define COMPCODE_GE 6
125 #define COMPCODE_TRUE 7
127 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
128 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
129 and SUM1. Then this yields nonzero if overflow occurred during the
130 addition.
132 Overflow occurs if A and B have the same sign, but A and SUM differ in
133 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
134 sign. */
135 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
137 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
138 We do that by representing the two-word integer in 4 words, with only
139 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
140 number. The value of the word is LOWPART + HIGHPART * BASE. */
142 #define LOWPART(x) \
143 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
144 #define HIGHPART(x) \
145 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
146 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
148 /* Unpack a two-word integer into 4 words.
149 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
150 WORDS points to the array of HOST_WIDE_INTs. */
152 static void
153 encode (words, low, hi)
154 HOST_WIDE_INT *words;
155 unsigned HOST_WIDE_INT low;
156 HOST_WIDE_INT hi;
158 words[0] = LOWPART (low);
159 words[1] = HIGHPART (low);
160 words[2] = LOWPART (hi);
161 words[3] = HIGHPART (hi);
164 /* Pack an array of 4 words into a two-word integer.
165 WORDS points to the array of words.
166 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
168 static void
169 decode (words, low, hi)
170 HOST_WIDE_INT *words;
171 unsigned HOST_WIDE_INT *low;
172 HOST_WIDE_INT *hi;
174 *low = words[0] + words[1] * BASE;
175 *hi = words[2] + words[3] * BASE;
178 /* Make the integer constant T valid for its type by setting to 0 or 1 all
179 the bits in the constant that don't belong in the type.
181 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
182 nonzero, a signed overflow has already occurred in calculating T, so
183 propagate it. */
186 force_fit_type (t, overflow)
187 tree t;
188 int overflow;
190 unsigned HOST_WIDE_INT low;
191 HOST_WIDE_INT high;
192 unsigned int prec;
194 if (TREE_CODE (t) == REAL_CST)
196 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
197 Consider doing it via real_convert now. */
198 return overflow;
201 else if (TREE_CODE (t) != INTEGER_CST)
202 return overflow;
204 low = TREE_INT_CST_LOW (t);
205 high = TREE_INT_CST_HIGH (t);
207 if (POINTER_TYPE_P (TREE_TYPE (t)))
208 prec = POINTER_SIZE;
209 else
210 prec = TYPE_PRECISION (TREE_TYPE (t));
212 /* First clear all bits that are beyond the type's precision. */
214 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
216 else if (prec > HOST_BITS_PER_WIDE_INT)
217 TREE_INT_CST_HIGH (t)
218 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
219 else
221 TREE_INT_CST_HIGH (t) = 0;
222 if (prec < HOST_BITS_PER_WIDE_INT)
223 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
226 /* Unsigned types do not suffer sign extension or overflow unless they
227 are a sizetype. */
228 if (TREE_UNSIGNED (TREE_TYPE (t))
229 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
230 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
231 return overflow;
233 /* If the value's sign bit is set, extend the sign. */
234 if (prec != 2 * HOST_BITS_PER_WIDE_INT
235 && (prec > HOST_BITS_PER_WIDE_INT
236 ? 0 != (TREE_INT_CST_HIGH (t)
237 & ((HOST_WIDE_INT) 1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 : 0 != (TREE_INT_CST_LOW (t)
240 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
242 /* Value is negative:
243 set to 1 all the bits that are outside this type's precision. */
244 if (prec > HOST_BITS_PER_WIDE_INT)
245 TREE_INT_CST_HIGH (t)
246 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
247 else
249 TREE_INT_CST_HIGH (t) = -1;
250 if (prec < HOST_BITS_PER_WIDE_INT)
251 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
255 /* Return nonzero if signed overflow occurred. */
256 return
257 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
258 != 0);
261 /* Add two doubleword integers with doubleword result.
262 Each argument is given as two `HOST_WIDE_INT' pieces.
263 One argument is L1 and H1; the other, L2 and H2.
264 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
267 add_double (l1, h1, l2, h2, lv, hv)
268 unsigned HOST_WIDE_INT l1, l2;
269 HOST_WIDE_INT h1, h2;
270 unsigned HOST_WIDE_INT *lv;
271 HOST_WIDE_INT *hv;
273 unsigned HOST_WIDE_INT l;
274 HOST_WIDE_INT h;
276 l = l1 + l2;
277 h = h1 + h2 + (l < l1);
279 *lv = l;
280 *hv = h;
281 return OVERFLOW_SUM_SIGN (h1, h2, h);
284 /* Negate a doubleword integer with doubleword result.
285 Return nonzero if the operation overflows, assuming it's signed.
286 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 neg_double (l1, h1, lv, hv)
291 unsigned HOST_WIDE_INT l1;
292 HOST_WIDE_INT h1;
293 unsigned HOST_WIDE_INT *lv;
294 HOST_WIDE_INT *hv;
296 if (l1 == 0)
298 *lv = 0;
299 *hv = - h1;
300 return (*hv & h1) < 0;
302 else
304 *lv = -l1;
305 *hv = ~h1;
306 return 0;
310 /* Multiply two doubleword integers with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 Each argument is given as two `HOST_WIDE_INT' pieces.
313 One argument is L1 and H1; the other, L2 and H2.
314 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
317 mul_double (l1, h1, l2, h2, lv, hv)
318 unsigned HOST_WIDE_INT l1, l2;
319 HOST_WIDE_INT h1, h2;
320 unsigned HOST_WIDE_INT *lv;
321 HOST_WIDE_INT *hv;
323 HOST_WIDE_INT arg1[4];
324 HOST_WIDE_INT arg2[4];
325 HOST_WIDE_INT prod[4 * 2];
326 unsigned HOST_WIDE_INT carry;
327 int i, j, k;
328 unsigned HOST_WIDE_INT toplow, neglow;
329 HOST_WIDE_INT tophigh, neghigh;
331 encode (arg1, l1, h1);
332 encode (arg2, l2, h2);
334 memset ((char *) prod, 0, sizeof prod);
336 for (i = 0; i < 4; i++)
338 carry = 0;
339 for (j = 0; j < 4; j++)
341 k = i + j;
342 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
343 carry += arg1[i] * arg2[j];
344 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
345 carry += prod[k];
346 prod[k] = LOWPART (carry);
347 carry = HIGHPART (carry);
349 prod[i + 4] = carry;
352 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
354 /* Check for overflow by calculating the top half of the answer in full;
355 it should agree with the low half's sign bit. */
356 decode (prod + 4, &toplow, &tophigh);
357 if (h1 < 0)
359 neg_double (l2, h2, &neglow, &neghigh);
360 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 if (h2 < 0)
364 neg_double (l1, h1, &neglow, &neghigh);
365 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
367 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
370 /* Shift the doubleword integer in L1, H1 left by COUNT places
371 keeping only PREC bits of result.
372 Shift right if COUNT is negative.
373 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
374 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
376 void
377 lshift_double (l1, h1, count, prec, lv, hv, arith)
378 unsigned HOST_WIDE_INT l1;
379 HOST_WIDE_INT h1, count;
380 unsigned int prec;
381 unsigned HOST_WIDE_INT *lv;
382 HOST_WIDE_INT *hv;
383 int arith;
385 unsigned HOST_WIDE_INT signmask;
387 if (count < 0)
389 rshift_double (l1, h1, -count, prec, lv, hv, arith);
390 return;
393 #ifdef SHIFT_COUNT_TRUNCATED
394 if (SHIFT_COUNT_TRUNCATED)
395 count %= prec;
396 #endif
398 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
400 /* Shifting by the host word size is undefined according to the
401 ANSI standard, so we must handle this as a special case. */
402 *hv = 0;
403 *lv = 0;
405 else if (count >= HOST_BITS_PER_WIDE_INT)
407 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
408 *lv = 0;
410 else
412 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
413 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
414 *lv = l1 << count;
417 /* Sign extend all bits that are beyond the precision. */
419 signmask = -((prec > HOST_BITS_PER_WIDE_INT
420 ? ((unsigned HOST_WIDE_INT) *hv
421 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
422 : (*lv >> (prec - 1))) & 1);
424 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
426 else if (prec >= HOST_BITS_PER_WIDE_INT)
428 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
429 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
431 else
433 *hv = signmask;
434 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
435 *lv |= signmask << prec;
439 /* Shift the doubleword integer in L1, H1 right by COUNT places
440 keeping only PREC bits of result. COUNT must be positive.
441 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
442 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
444 void
445 rshift_double (l1, h1, count, prec, lv, hv, arith)
446 unsigned HOST_WIDE_INT l1;
447 HOST_WIDE_INT h1, count;
448 unsigned int prec;
449 unsigned HOST_WIDE_INT *lv;
450 HOST_WIDE_INT *hv;
451 int arith;
453 unsigned HOST_WIDE_INT signmask;
455 signmask = (arith
456 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
457 : 0);
459 #ifdef SHIFT_COUNT_TRUNCATED
460 if (SHIFT_COUNT_TRUNCATED)
461 count %= prec;
462 #endif
464 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
466 /* Shifting by the host word size is undefined according to the
467 ANSI standard, so we must handle this as a special case. */
468 *hv = 0;
469 *lv = 0;
471 else if (count >= HOST_BITS_PER_WIDE_INT)
473 *hv = 0;
474 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
476 else
478 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
479 *lv = ((l1 >> count)
480 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
483 /* Zero / sign extend all bits that are beyond the precision. */
485 if (count >= (HOST_WIDE_INT)prec)
487 *hv = signmask;
488 *lv = signmask;
490 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
492 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
494 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
495 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
497 else
499 *hv = signmask;
500 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
501 *lv |= signmask << (prec - count);
505 /* Rotate the doubleword integer in L1, H1 left by COUNT places
506 keeping only PREC bits of result.
507 Rotate right if COUNT is negative.
508 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
510 void
511 lrotate_double (l1, h1, count, prec, lv, hv)
512 unsigned HOST_WIDE_INT l1;
513 HOST_WIDE_INT h1, count;
514 unsigned int prec;
515 unsigned HOST_WIDE_INT *lv;
516 HOST_WIDE_INT *hv;
518 unsigned HOST_WIDE_INT s1l, s2l;
519 HOST_WIDE_INT s1h, s2h;
521 count %= prec;
522 if (count < 0)
523 count += prec;
525 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
527 *lv = s1l | s2l;
528 *hv = s1h | s2h;
531 /* Rotate the doubleword integer in L1, H1 left by COUNT places
532 keeping only PREC bits of result. COUNT must be positive.
533 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
535 void
536 rrotate_double (l1, h1, count, prec, lv, hv)
537 unsigned HOST_WIDE_INT l1;
538 HOST_WIDE_INT h1, count;
539 unsigned int prec;
540 unsigned HOST_WIDE_INT *lv;
541 HOST_WIDE_INT *hv;
543 unsigned HOST_WIDE_INT s1l, s2l;
544 HOST_WIDE_INT s1h, s2h;
546 count %= prec;
547 if (count < 0)
548 count += prec;
550 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
551 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
552 *lv = s1l | s2l;
553 *hv = s1h | s2h;
556 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
557 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
558 CODE is a tree code for a kind of division, one of
559 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 or EXACT_DIV_EXPR
561 It controls how the quotient is rounded to an integer.
562 Return nonzero if the operation overflows.
563 UNS nonzero says do unsigned division. */
566 div_and_round_double (code, uns,
567 lnum_orig, hnum_orig, lden_orig, hden_orig,
568 lquo, hquo, lrem, hrem)
569 enum tree_code code;
570 int uns;
571 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig;
573 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig;
575 unsigned HOST_WIDE_INT *lquo, *lrem;
576 HOST_WIDE_INT *hquo, *hrem;
578 int quo_neg = 0;
579 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
580 HOST_WIDE_INT den[4], quo[4];
581 int i, j;
582 unsigned HOST_WIDE_INT work;
583 unsigned HOST_WIDE_INT carry = 0;
584 unsigned HOST_WIDE_INT lnum = lnum_orig;
585 HOST_WIDE_INT hnum = hnum_orig;
586 unsigned HOST_WIDE_INT lden = lden_orig;
587 HOST_WIDE_INT hden = hden_orig;
588 int overflow = 0;
590 if (hden == 0 && lden == 0)
591 overflow = 1, lden = 1;
593 /* calculate quotient sign and convert operands to unsigned. */
594 if (!uns)
596 if (hnum < 0)
598 quo_neg = ~ quo_neg;
599 /* (minimum integer) / (-1) is the only overflow case. */
600 if (neg_double (lnum, hnum, &lnum, &hnum)
601 && ((HOST_WIDE_INT) lden & hden) == -1)
602 overflow = 1;
604 if (hden < 0)
606 quo_neg = ~ quo_neg;
607 neg_double (lden, hden, &lden, &hden);
611 if (hnum == 0 && hden == 0)
612 { /* single precision */
613 *hquo = *hrem = 0;
614 /* This unsigned division rounds toward zero. */
615 *lquo = lnum / lden;
616 goto finish_up;
619 if (hnum == 0)
620 { /* trivial case: dividend < divisor */
621 /* hden != 0 already checked. */
622 *hquo = *lquo = 0;
623 *hrem = hnum;
624 *lrem = lnum;
625 goto finish_up;
628 memset ((char *) quo, 0, sizeof quo);
630 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
631 memset ((char *) den, 0, sizeof den);
633 encode (num, lnum, hnum);
634 encode (den, lden, hden);
636 /* Special code for when the divisor < BASE. */
637 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
639 /* hnum != 0 already checked. */
640 for (i = 4 - 1; i >= 0; i--)
642 work = num[i] + carry * BASE;
643 quo[i] = work / lden;
644 carry = work % lden;
647 else
649 /* Full double precision division,
650 with thanks to Don Knuth's "Seminumerical Algorithms". */
651 int num_hi_sig, den_hi_sig;
652 unsigned HOST_WIDE_INT quo_est, scale;
654 /* Find the highest nonzero divisor digit. */
655 for (i = 4 - 1;; i--)
656 if (den[i] != 0)
658 den_hi_sig = i;
659 break;
662 /* Insure that the first digit of the divisor is at least BASE/2.
663 This is required by the quotient digit estimation algorithm. */
665 scale = BASE / (den[den_hi_sig] + 1);
666 if (scale > 1)
667 { /* scale divisor and dividend */
668 carry = 0;
669 for (i = 0; i <= 4 - 1; i++)
671 work = (num[i] * scale) + carry;
672 num[i] = LOWPART (work);
673 carry = HIGHPART (work);
676 num[4] = carry;
677 carry = 0;
678 for (i = 0; i <= 4 - 1; i++)
680 work = (den[i] * scale) + carry;
681 den[i] = LOWPART (work);
682 carry = HIGHPART (work);
683 if (den[i] != 0) den_hi_sig = i;
687 num_hi_sig = 4;
689 /* Main loop */
690 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
692 /* Guess the next quotient digit, quo_est, by dividing the first
693 two remaining dividend digits by the high order quotient digit.
694 quo_est is never low and is at most 2 high. */
695 unsigned HOST_WIDE_INT tmp;
697 num_hi_sig = i + den_hi_sig + 1;
698 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
699 if (num[num_hi_sig] != den[den_hi_sig])
700 quo_est = work / den[den_hi_sig];
701 else
702 quo_est = BASE - 1;
704 /* Refine quo_est so it's usually correct, and at most one high. */
705 tmp = work - quo_est * den[den_hi_sig];
706 if (tmp < BASE
707 && (den[den_hi_sig - 1] * quo_est
708 > (tmp * BASE + num[num_hi_sig - 2])))
709 quo_est--;
711 /* Try QUO_EST as the quotient digit, by multiplying the
712 divisor by QUO_EST and subtracting from the remaining dividend.
713 Keep in mind that QUO_EST is the I - 1st digit. */
715 carry = 0;
716 for (j = 0; j <= den_hi_sig; j++)
718 work = quo_est * den[j] + carry;
719 carry = HIGHPART (work);
720 work = num[i + j] - LOWPART (work);
721 num[i + j] = LOWPART (work);
722 carry += HIGHPART (work) != 0;
725 /* If quo_est was high by one, then num[i] went negative and
726 we need to correct things. */
727 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
729 quo_est--;
730 carry = 0; /* add divisor back in */
731 for (j = 0; j <= den_hi_sig; j++)
733 work = num[i + j] + den[j] + carry;
734 carry = HIGHPART (work);
735 num[i + j] = LOWPART (work);
738 num [num_hi_sig] += carry;
741 /* Store the quotient digit. */
742 quo[i] = quo_est;
746 decode (quo, lquo, hquo);
748 finish_up:
749 /* if result is negative, make it so. */
750 if (quo_neg)
751 neg_double (*lquo, *hquo, lquo, hquo);
753 /* compute trial remainder: rem = num - (quo * den) */
754 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
755 neg_double (*lrem, *hrem, lrem, hrem);
756 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
758 switch (code)
760 case TRUNC_DIV_EXPR:
761 case TRUNC_MOD_EXPR: /* round toward zero */
762 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 return overflow;
765 case FLOOR_DIV_EXPR:
766 case FLOOR_MOD_EXPR: /* round toward negative infinity */
767 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
769 /* quo = quo - 1; */
770 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
771 lquo, hquo);
773 else
774 return overflow;
775 break;
777 case CEIL_DIV_EXPR:
778 case CEIL_MOD_EXPR: /* round toward positive infinity */
779 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
781 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
782 lquo, hquo);
784 else
785 return overflow;
786 break;
788 case ROUND_DIV_EXPR:
789 case ROUND_MOD_EXPR: /* round to closest integer */
791 unsigned HOST_WIDE_INT labs_rem = *lrem;
792 HOST_WIDE_INT habs_rem = *hrem;
793 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
794 HOST_WIDE_INT habs_den = hden, htwice;
796 /* Get absolute values */
797 if (*hrem < 0)
798 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
799 if (hden < 0)
800 neg_double (lden, hden, &labs_den, &habs_den);
802 /* If (2 * abs (lrem) >= abs (lden)) */
803 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
804 labs_rem, habs_rem, &ltwice, &htwice);
806 if (((unsigned HOST_WIDE_INT) habs_den
807 < (unsigned HOST_WIDE_INT) htwice)
808 || (((unsigned HOST_WIDE_INT) habs_den
809 == (unsigned HOST_WIDE_INT) htwice)
810 && (labs_den < ltwice)))
812 if (*hquo < 0)
813 /* quo = quo - 1; */
814 add_double (*lquo, *hquo,
815 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
816 else
817 /* quo = quo + 1; */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
819 lquo, hquo);
821 else
822 return overflow;
824 break;
826 default:
827 abort ();
830 /* compute true remainder: rem = num - (quo * den) */
831 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
832 neg_double (*lrem, *hrem, lrem, hrem);
833 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 return overflow;
837 /* Given T, an expression, return the negation of T. Allow for T to be
838 null, in which case return null. */
840 static tree
841 negate_expr (t)
842 tree t;
844 tree type;
845 tree tem;
847 if (t == 0)
848 return 0;
850 type = TREE_TYPE (t);
851 STRIP_SIGN_NOPS (t);
853 switch (TREE_CODE (t))
855 case INTEGER_CST:
856 case REAL_CST:
857 if (! TREE_UNSIGNED (type)
858 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
859 && ! TREE_OVERFLOW (tem))
860 return tem;
861 break;
863 case NEGATE_EXPR:
864 return convert (type, TREE_OPERAND (t, 0));
866 case MINUS_EXPR:
867 /* - (A - B) -> B - A */
868 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
869 return convert (type,
870 fold (build (MINUS_EXPR, TREE_TYPE (t),
871 TREE_OPERAND (t, 1),
872 TREE_OPERAND (t, 0))));
873 break;
875 default:
876 break;
879 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
882 /* Split a tree IN into a constant, literal and variable parts that could be
883 combined with CODE to make IN. "constant" means an expression with
884 TREE_CONSTANT but that isn't an actual constant. CODE must be a
885 commutative arithmetic operation. Store the constant part into *CONP,
886 the literal in *LITP and return the variable part. If a part isn't
887 present, set it to null. If the tree does not decompose in this way,
888 return the entire tree as the variable part and the other parts as null.
890 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
891 case, we negate an operand that was subtracted. Except if it is a
892 literal for which we use *MINUS_LITP instead.
894 If NEGATE_P is true, we are negating all of IN, again except a literal
895 for which we use *MINUS_LITP instead.
897 If IN is itself a literal or constant, return it as appropriate.
899 Note that we do not guarantee that any of the three values will be the
900 same type as IN, but they will have the same signedness and mode. */
902 static tree
903 split_tree (in, code, conp, litp, minus_litp, negate_p)
904 tree in;
905 enum tree_code code;
906 tree *conp, *litp, *minus_litp;
907 int negate_p;
909 tree var = 0;
911 *conp = 0;
912 *litp = 0;
913 *minus_litp = 0;
915 /* Strip any conversions that don't change the machine mode or signedness. */
916 STRIP_SIGN_NOPS (in);
918 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
919 *litp = in;
920 else if (TREE_CODE (in) == code
921 || (! FLOAT_TYPE_P (TREE_TYPE (in))
922 /* We can associate addition and subtraction together (even
923 though the C standard doesn't say so) for integers because
924 the value is not affected. For reals, the value might be
925 affected, so we can't. */
926 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
927 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
929 tree op0 = TREE_OPERAND (in, 0);
930 tree op1 = TREE_OPERAND (in, 1);
931 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
932 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
934 /* First see if either of the operands is a literal, then a constant. */
935 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
936 *litp = op0, op0 = 0;
937 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
938 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
940 if (op0 != 0 && TREE_CONSTANT (op0))
941 *conp = op0, op0 = 0;
942 else if (op1 != 0 && TREE_CONSTANT (op1))
943 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
945 /* If we haven't dealt with either operand, this is not a case we can
946 decompose. Otherwise, VAR is either of the ones remaining, if any. */
947 if (op0 != 0 && op1 != 0)
948 var = in;
949 else if (op0 != 0)
950 var = op0;
951 else
952 var = op1, neg_var_p = neg1_p;
954 /* Now do any needed negations. */
955 if (neg_litp_p)
956 *minus_litp = *litp, *litp = 0;
957 if (neg_conp_p)
958 *conp = negate_expr (*conp);
959 if (neg_var_p)
960 var = negate_expr (var);
962 else if (TREE_CONSTANT (in))
963 *conp = in;
964 else
965 var = in;
967 if (negate_p)
969 if (*litp)
970 *minus_litp = *litp, *litp = 0;
971 else if (*minus_litp)
972 *litp = *minus_litp, *minus_litp = 0;
973 *conp = negate_expr (*conp);
974 var = negate_expr (var);
977 return var;
980 /* Re-associate trees split by the above function. T1 and T2 are either
981 expressions to associate or null. Return the new expression, if any. If
982 we build an operation, do it in TYPE and with CODE. */
984 static tree
985 associate_trees (t1, t2, code, type)
986 tree t1, t2;
987 enum tree_code code;
988 tree type;
990 if (t1 == 0)
991 return t2;
992 else if (t2 == 0)
993 return t1;
995 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
996 try to fold this since we will have infinite recursion. But do
997 deal with any NEGATE_EXPRs. */
998 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
999 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1001 if (code == PLUS_EXPR)
1003 if (TREE_CODE (t1) == NEGATE_EXPR)
1004 return build (MINUS_EXPR, type, convert (type, t2),
1005 convert (type, TREE_OPERAND (t1, 0)));
1006 else if (TREE_CODE (t2) == NEGATE_EXPR)
1007 return build (MINUS_EXPR, type, convert (type, t1),
1008 convert (type, TREE_OPERAND (t2, 0)));
1010 return build (code, type, convert (type, t1), convert (type, t2));
1013 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1016 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1017 to produce a new constant.
1019 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1021 static tree
1022 int_const_binop (code, arg1, arg2, notrunc)
1023 enum tree_code code;
1024 tree arg1, arg2;
1025 int notrunc;
/* Each INTEGER_CST is a double-word value held as an unsigned low word
   plus a signed high word; all arithmetic below works on those pairs.  */
1027 unsigned HOST_WIDE_INT int1l, int2l;
1028 HOST_WIDE_INT int1h, int2h;
1029 unsigned HOST_WIDE_INT low;
1030 HOST_WIDE_INT hi;
1031 unsigned HOST_WIDE_INT garbagel;
1032 HOST_WIDE_INT garbageh;
1033 tree t;
1034 tree type = TREE_TYPE (arg1);
1035 int uns = TREE_UNSIGNED (type);
1036 int is_sizetype
1037 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1038 int overflow = 0;
1039 int no_overflow = 0;
1041 int1l = TREE_INT_CST_LOW (arg1);
1042 int1h = TREE_INT_CST_HIGH (arg1);
1043 int2l = TREE_INT_CST_LOW (arg2);
1044 int2h = TREE_INT_CST_HIGH (arg2);
1046 switch (code)
1048 case BIT_IOR_EXPR:
1049 low = int1l | int2l, hi = int1h | int2h;
1050 break;
1052 case BIT_XOR_EXPR:
1053 low = int1l ^ int2l, hi = int1h ^ int2h;
1054 break;
1056 case BIT_AND_EXPR:
1057 low = int1l & int2l, hi = int1h & int2h;
1058 break;
1060 case BIT_ANDTC_EXPR:
1061 low = int1l & ~int2l, hi = int1h & ~int2h;
1062 break;
1064 case RSHIFT_EXPR:
1065 int2l = -int2l;
/* A right shift is a left shift by the negated count; fall through.  */
1066 case LSHIFT_EXPR:
1067 /* It's unclear from the C standard whether shifts can overflow.
1068 The following code ignores overflow; perhaps a C standard
1069 interpretation ruling is needed. */
1070 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1071 &low, &hi, !uns);
1072 no_overflow = 1;
1073 break;
1075 case RROTATE_EXPR:
1076 int2l = - int2l;
/* Likewise, a right rotate is a left rotate by the negated count.  */
1077 case LROTATE_EXPR:
1078 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1079 &low, &hi);
1080 break;
1082 case PLUS_EXPR:
1083 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1084 break;
1086 case MINUS_EXPR:
/* Subtraction is negation followed by addition; the overflow test
   accounts for the sign flip of the second operand.  */
1087 neg_double (int2l, int2h, &low, &hi);
1088 add_double (int1l, int1h, low, hi, &low, &hi);
1089 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1090 break;
1092 case MULT_EXPR:
1093 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1094 break;
1096 case TRUNC_DIV_EXPR:
1097 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1098 case EXACT_DIV_EXPR:
1099 /* This is a shortcut for a common special case: both operands
   nonnegative and fitting in a single word. */
1100 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1101 && ! TREE_CONSTANT_OVERFLOW (arg1)
1102 && ! TREE_CONSTANT_OVERFLOW (arg2)
1103 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1105 if (code == CEIL_DIV_EXPR)
1106 int1l += int2l - 1;
1108 low = int1l / int2l, hi = 0;
1109 break;
1112 /* ... fall through ... */
1114 case ROUND_DIV_EXPR:
/* Dividing by one and dividing a nonzero value by itself are handled
   without calling div_and_round_double.  */
1115 if (int2h == 0 && int2l == 1)
1117 low = int1l, hi = int1h;
1118 break;
1120 if (int1l == int2l && int1h == int2h
1121 && ! (int1l == 0 && int1h == 0))
1123 low = 1, hi = 0;
1124 break;
1126 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1127 &low, &hi, &garbagel, &garbageh);
1128 break;
1130 case TRUNC_MOD_EXPR:
1131 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1132 /* This is a shortcut for a common special case. */
1133 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1134 && ! TREE_CONSTANT_OVERFLOW (arg1)
1135 && ! TREE_CONSTANT_OVERFLOW (arg2)
1136 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1138 if (code == CEIL_MOD_EXPR)
1139 int1l += int2l - 1;
1140 low = int1l % int2l, hi = 0;
1141 break;
1144 /* ... fall through ... */
1146 case ROUND_MOD_EXPR:
/* For the remainder, the quotient words are the "garbage" outputs.  */
1147 overflow = div_and_round_double (code, uns,
1148 int1l, int1h, int2l, int2h,
1149 &garbagel, &garbageh, &low, &hi);
1150 break;
1152 case MIN_EXPR:
1153 case MAX_EXPR:
/* First compute LOW = (arg1 < arg2) under the right signedness,
   then select whichever operand the code calls for.  */
1154 if (uns)
1155 low = (((unsigned HOST_WIDE_INT) int1h
1156 < (unsigned HOST_WIDE_INT) int2h)
1157 || (((unsigned HOST_WIDE_INT) int1h
1158 == (unsigned HOST_WIDE_INT) int2h)
1159 && int1l < int2l));
1160 else
1161 low = (int1h < int2h
1162 || (int1h == int2h && int1l < int2l));
1164 if (low == (code == MIN_EXPR))
1165 low = int1l, hi = int1h;
1166 else
1167 low = int2l, hi = int2h;
1168 break;
1170 default:
1171 abort ();
1174 /* If this is for a sizetype, can be represented as one (signed)
1175 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1176 constants. */
1177 if (is_sizetype
1178 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1179 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1180 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1181 return size_int_type_wide (low, type);
1182 else
1184 t = build_int_2 (low, hi);
1185 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Propagate overflow: if NOTRUNC, report the raw overflow flag for
   signed or sizetype arithmetic; otherwise let force_fit_type truncate
   to the type and report whether truncation changed the value.  */
1188 TREE_OVERFLOW (t)
1189 = ((notrunc
1190 ? (!uns || is_sizetype) && overflow
1191 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1192 && ! no_overflow))
1193 | TREE_OVERFLOW (arg1)
1194 | TREE_OVERFLOW (arg2));
1196 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1197 So check if force_fit_type truncated the value. */
1198 if (is_sizetype
1199 && ! TREE_OVERFLOW (t)
1200 && (TREE_INT_CST_HIGH (t) != hi
1201 || TREE_INT_CST_LOW (t) != low))
1202 TREE_OVERFLOW (t) = 1;
1204 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1205 | TREE_CONSTANT_OVERFLOW (arg1)
1206 | TREE_CONSTANT_OVERFLOW (arg2));
1207 return t;
1210 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1211 constant. We assume ARG1 and ARG2 have the same data type, or at least
1212 are the same kind of constant and the same machine mode.
1214 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1216 static tree
1217 const_binop (code, arg1, arg2, notrunc)
1218 enum tree_code code;
1219 tree arg1, arg2;
1220 int notrunc;
/* Dispatch on the kind of constant: integers go to int_const_binop,
   reals through REAL_ARITHMETIC, complex values componentwise.
   Returns 0 when the combination cannot be folded here.  */
1222 STRIP_NOPS (arg1);
1223 STRIP_NOPS (arg2);
1225 if (TREE_CODE (arg1) == INTEGER_CST)
1226 return int_const_binop (code, arg1, arg2, notrunc);
1228 if (TREE_CODE (arg1) == REAL_CST)
1230 REAL_VALUE_TYPE d1;
1231 REAL_VALUE_TYPE d2;
1232 REAL_VALUE_TYPE value;
1233 tree t;
1235 d1 = TREE_REAL_CST (arg1);
1236 d2 = TREE_REAL_CST (arg2);
1238 /* If either operand is a NaN, just return it. Otherwise, set up
1239 for floating-point trap; we return an overflow. */
1240 if (REAL_VALUE_ISNAN (d1))
1241 return arg1;
1242 else if (REAL_VALUE_ISNAN (d2))
1243 return arg2;
1245 REAL_ARITHMETIC (value, code, d1, d2);
/* Round the result to the precision of the operands' mode before
   building the REAL_CST node.  */
1247 t = build_real (TREE_TYPE (arg1),
1248 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1249 value));
1251 TREE_OVERFLOW (t)
1252 = (force_fit_type (t, 0)
1253 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1254 TREE_CONSTANT_OVERFLOW (t)
1255 = TREE_OVERFLOW (t)
1256 | TREE_CONSTANT_OVERFLOW (arg1)
1257 | TREE_CONSTANT_OVERFLOW (arg2);
1258 return t;
1260 if (TREE_CODE (arg1) == COMPLEX_CST)
/* Complex constants: recurse on the real and imaginary parts using the
   textbook formulas.  */
1262 tree type = TREE_TYPE (arg1);
1263 tree r1 = TREE_REALPART (arg1);
1264 tree i1 = TREE_IMAGPART (arg1);
1265 tree r2 = TREE_REALPART (arg2);
1266 tree i2 = TREE_IMAGPART (arg2);
1267 tree t;
1269 switch (code)
1271 case PLUS_EXPR:
1272 t = build_complex (type,
1273 const_binop (PLUS_EXPR, r1, r2, notrunc),
1274 const_binop (PLUS_EXPR, i1, i2, notrunc));
1275 break;
1277 case MINUS_EXPR:
1278 t = build_complex (type,
1279 const_binop (MINUS_EXPR, r1, r2, notrunc),
1280 const_binop (MINUS_EXPR, i1, i2, notrunc));
1281 break;
1283 case MULT_EXPR:
/* (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i  */
1284 t = build_complex (type,
1285 const_binop (MINUS_EXPR,
1286 const_binop (MULT_EXPR,
1287 r1, r2, notrunc),
1288 const_binop (MULT_EXPR,
1289 i1, i2, notrunc),
1290 notrunc),
1291 const_binop (PLUS_EXPR,
1292 const_binop (MULT_EXPR,
1293 r1, i2, notrunc),
1294 const_binop (MULT_EXPR,
1295 i1, r2, notrunc),
1296 notrunc));
1297 break;
1299 case RDIV_EXPR:
/* Division multiplies by the conjugate and divides both components by
   |arg2|^2 = r2*r2 + i2*i2.  */
1301 tree magsquared
1302 = const_binop (PLUS_EXPR,
1303 const_binop (MULT_EXPR, r2, r2, notrunc),
1304 const_binop (MULT_EXPR, i2, i2, notrunc),
1305 notrunc);
1307 t = build_complex (type,
1308 const_binop
1309 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1310 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1311 const_binop (PLUS_EXPR,
1312 const_binop (MULT_EXPR, r1, r2,
1313 notrunc),
1314 const_binop (MULT_EXPR, i1, i2,
1315 notrunc),
1316 notrunc),
1317 magsquared, notrunc),
1318 const_binop
1319 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1320 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1321 const_binop (MINUS_EXPR,
1322 const_binop (MULT_EXPR, i1, r2,
1323 notrunc),
1324 const_binop (MULT_EXPR, r1, i2,
1325 notrunc),
1326 notrunc),
1327 magsquared, notrunc));
1329 break;
1331 default:
1332 abort ();
1334 return t;
1336 return 0;
1339 /* These are the hash table functions for the hash table of INTEGER_CST
1340 nodes of a sizetype. */
1342 /* Return the hash code code X, an INTEGER_CST. */
1344 static hashval_t
1345 size_htab_hash (x)
1346 const void *x;
1348 tree t = (tree) x;
1350 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1351 ^ htab_hash_pointer (TREE_TYPE (t))
1352 ^ (TREE_OVERFLOW (t) << 20));
1355 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1356 is the same as that given by *Y, which is the same. */
1358 static int
1359 size_htab_eq (x, y)
1360 const void *x;
1361 const void *y;
1363 tree xt = (tree) x;
1364 tree yt = (tree) y;
1366 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1367 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1368 && TREE_TYPE (xt) == TREE_TYPE (yt)
1369 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1372 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1373 bits are given by NUMBER and of the sizetype represented by KIND. */
1375 tree
1376 size_int_wide (number, kind)
1377 HOST_WIDE_INT number;
1378 enum size_type_kind kind;
1380 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1383 /* Likewise, but the desired type is specified explicitly. */
/* Scratch INTEGER_CST used as the lookup key; becomes the stored node
   when the lookup misses.  */
1385 static GTY (()) tree new_const;
/* Cache of sizetype INTEGER_CSTs, keyed by value/type/overflow
   (see size_htab_hash and size_htab_eq).  */
1386 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1387 htab_t size_htab;
1389 tree
1390 size_int_type_wide (number, type)
1391 HOST_WIDE_INT number;
1392 tree type;
/* Lazily create the hash table and the first scratch node.  */
1394 PTR *slot;
1396 if (size_htab == 0)
1398 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1399 new_const = make_node (INTEGER_CST);
1402 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1403 hash table, we return the value from the hash table. Otherwise, we
1404 place that in the hash table and make a new node for the next time. */
1405 TREE_INT_CST_LOW (new_const) = number;
1406 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1407 TREE_TYPE (new_const) = type;
1408 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1409 = force_fit_type (new_const, 0);
1411 slot = htab_find_slot (size_htab, new_const, INSERT);
1412 if (*slot == 0)
/* Miss: keep NEW_CONST as the cached node and allocate a fresh
   scratch node for the next call.  */
1414 tree t = new_const;
1416 *slot = (PTR) new_const;
1417 new_const = make_node (INTEGER_CST);
1418 return t;
1420 else
1421 return (tree) *slot;
1424 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1425 is a tree code. The type of the result is taken from the operands.
1426 Both operands must have the same integer type and it must be a size type.
1427 If the operands are constant, so is the result. */
1429 tree
1430 size_binop (code, arg0, arg1)
1431 enum tree_code code;
1432 tree arg0, arg1;
/* Both operands must share the same sizetype; anything else is a
   caller bug, hence the abort.  */
1434 tree type = TREE_TYPE (arg0);
1436 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1437 || type != TREE_TYPE (arg1))
1438 abort ();
1440 /* Handle the special case of two integer constants faster. */
1441 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1443 /* And some specific cases even faster than that. */
1444 if (code == PLUS_EXPR && integer_zerop (arg0))
1445 return arg1;
1446 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1447 && integer_zerop (arg1))
1448 return arg0;
1449 else if (code == MULT_EXPR && integer_onep (arg0))
1450 return arg1;
1452 /* Handle general case of two integer constants. */
1453 return int_const_binop (code, arg0, arg1, 0);
1456 if (arg0 == error_mark_node || arg1 == error_mark_node)
1457 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1459 return fold (build (code, type, arg0, arg1));
1462 /* Given two values, either both of sizetype or both of bitsizetype,
1463 compute the difference between the two values. Return the value
1464 in signed type corresponding to the type of the operands. */
1466 tree
1467 size_diffop (arg0, arg1)
1468 tree arg0, arg1;
/* Compute ARG0 - ARG1, returning the result in the signed sizetype
   (ssizetype/sbitsizetype) corresponding to the operands' type.  */
1470 tree type = TREE_TYPE (arg0);
1471 tree ctype;
1473 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1474 || type != TREE_TYPE (arg1))
1475 abort ();
1477 /* If the type is already signed, just do the simple thing. */
1478 if (! TREE_UNSIGNED (type))
1479 return size_binop (MINUS_EXPR, arg0, arg1);
1481 ctype = (type == bitsizetype || type == ubitsizetype
1482 ? sbitsizetype : ssizetype);
1484 /* If either operand is not a constant, do the conversions to the signed
1485 type and subtract. The hardware will do the right thing with any
1486 overflow in the subtraction. */
1487 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1488 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1489 convert (ctype, arg1));
1491 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1492 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1493 overflow) and negate (which can't either). Special-case a result
1494 of zero while we're here. */
1495 if (tree_int_cst_equal (arg0, arg1))
1496 return convert (ctype, integer_zero_node);
1497 else if (tree_int_cst_lt (arg1, arg0))
1498 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1499 else
1500 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1501 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1505 /* Given T, a tree representing type conversion of ARG1, a constant,
1506 return a constant tree representing the result of conversion. */
1508 static tree
1509 fold_convert (t, arg1)
1510 tree t;
1511 tree arg1;
/* T is a conversion expression whose target type is TREE_TYPE (t);
   ARG1 is the constant being converted.  Returns a constant of the
   target type, or T itself when folding is not possible.  */
1513 tree type = TREE_TYPE (t);
1514 int overflow = 0;
1516 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1518 if (TREE_CODE (arg1) == INTEGER_CST)
1520 /* If we would build a constant wider than GCC supports,
1521 leave the conversion unfolded. */
1522 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1523 return t;
1525 /* If we are trying to make a sizetype for a small integer, use
1526 size_int to pick up cached types to reduce duplicate nodes. */
1527 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1528 && !TREE_CONSTANT_OVERFLOW (arg1)
1529 && compare_tree_int (arg1, 10000) < 0)
1530 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1532 /* Given an integer constant, make new constant with new type,
1533 appropriately sign-extended or truncated. */
1534 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1535 TREE_INT_CST_HIGH (arg1));
1536 TREE_TYPE (t) = type;
1537 /* Indicate an overflow if (1) ARG1 already overflowed,
1538 or (2) force_fit_type indicates an overflow.
1539 Tell force_fit_type that an overflow has already occurred
1540 if ARG1 is a too-large unsigned value and T is signed.
1541 But don't indicate an overflow if converting a pointer. */
1542 TREE_OVERFLOW (t)
1543 = ((force_fit_type (t,
1544 (TREE_INT_CST_HIGH (arg1) < 0
1545 && (TREE_UNSIGNED (type)
1546 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1547 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1548 || TREE_OVERFLOW (arg1));
1549 TREE_CONSTANT_OVERFLOW (t)
1550 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1552 else if (TREE_CODE (arg1) == REAL_CST)
1554 /* Don't initialize these, use assignments.
1555 Initialized local aggregates don't work on old compilers. */
1556 REAL_VALUE_TYPE x;
1557 REAL_VALUE_TYPE l;
1558 REAL_VALUE_TYPE u;
1559 tree type1 = TREE_TYPE (arg1);
1560 int no_upper_bound;
1562 x = TREE_REAL_CST (arg1);
1563 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1565 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1566 if (!no_upper_bound)
1567 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1569 /* See if X will be in range after truncation towards 0.
1570 To compensate for truncation, move the bounds away from 0,
1571 but reject if X exactly equals the adjusted bounds. */
1572 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1573 if (!no_upper_bound)
1574 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1575 /* If X is a NaN, use zero instead and show we have an overflow.
1576 Otherwise, range check. */
1577 if (REAL_VALUE_ISNAN (x))
1578 overflow = 1, x = dconst0;
/* NOTE(review): when no_upper_bound is set, the "!no_upper_bound" term
   makes this condition always true, so overflow is forced even for
   in-range values -- looks suspicious; verify intended behavior.  */
1579 else if (! (REAL_VALUES_LESS (l, x)
1580 && !no_upper_bound
1581 && REAL_VALUES_LESS (x, u)))
1582 overflow = 1;
1585 HOST_WIDE_INT low, high;
1586 REAL_VALUE_TO_INT (&low, &high, x);
1587 t = build_int_2 (low, high);
1589 TREE_TYPE (t) = type;
1590 TREE_OVERFLOW (t)
1591 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1592 TREE_CONSTANT_OVERFLOW (t)
1593 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1595 TREE_TYPE (t) = type;
1597 else if (TREE_CODE (type) == REAL_TYPE)
1599 if (TREE_CODE (arg1) == INTEGER_CST)
1600 return build_real_from_int_cst (type, arg1);
1601 if (TREE_CODE (arg1) == REAL_CST)
1603 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1605 /* We make a copy of ARG1 so that we don't modify an
1606 existing constant tree. */
1607 t = copy_node (arg1);
1608 TREE_TYPE (t) = type;
1609 return t;
1612 t = build_real (type,
1613 real_value_truncate (TYPE_MODE (type),
1614 TREE_REAL_CST (arg1)));
1616 TREE_OVERFLOW (t)
1617 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1618 TREE_CONSTANT_OVERFLOW (t)
1619 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1620 return t;
1623 TREE_CONSTANT (t) = 1;
1624 return t;
1627 /* Return an expr equal to X but certainly not valid as an lvalue. */
1629 tree
1630 non_lvalue (x)
1631 tree x;
1633 tree result;
1635 /* These things are certainly not lvalues. */
1636 if (TREE_CODE (x) == NON_LVALUE_EXPR
1637 || TREE_CODE (x) == INTEGER_CST
1638 || TREE_CODE (x) == REAL_CST
1639 || TREE_CODE (x) == STRING_CST
1640 || TREE_CODE (x) == ADDR_EXPR)
1641 return x;
1643 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1644 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1645 return result;
1648 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1649 Zero means allow extended lvalues. */
1651 int pedantic_lvalues;
1653 /* When pedantic, return an expr equal to X but certainly not valid as a
1654 pedantic lvalue. Otherwise, return X. */
1656 tree
1657 pedantic_non_lvalue (x)
1658 tree x;
1660 if (pedantic_lvalues)
1661 return non_lvalue (x);
1662 else
1663 return x;
1666 /* Given a tree comparison code, return the code that is the logical inverse
1667 of the given code. It is not safe to do this for floating-point
1668 comparisons, except for NE_EXPR and EQ_EXPR. */
1670 static enum tree_code
1671 invert_tree_comparison (code)
1672 enum tree_code code;
1674 switch (code)
1676 case EQ_EXPR:
1677 return NE_EXPR;
1678 case NE_EXPR:
1679 return EQ_EXPR;
1680 case GT_EXPR:
1681 return LE_EXPR;
1682 case GE_EXPR:
1683 return LT_EXPR;
1684 case LT_EXPR:
1685 return GE_EXPR;
1686 case LE_EXPR:
1687 return GT_EXPR;
1688 default:
1689 abort ();
1693 /* Similar, but return the comparison that results if the operands are
1694 swapped. This is safe for floating-point. */
1696 static enum tree_code
1697 swap_tree_comparison (code)
1698 enum tree_code code;
1700 switch (code)
1702 case EQ_EXPR:
1703 case NE_EXPR:
1704 return code;
1705 case GT_EXPR:
1706 return LT_EXPR;
1707 case GE_EXPR:
1708 return LE_EXPR;
1709 case LT_EXPR:
1710 return GT_EXPR;
1711 case LE_EXPR:
1712 return GE_EXPR;
1713 default:
1714 abort ();
1719 /* Convert a comparison tree code from an enum tree_code representation
1720 into a compcode bit-based encoding. This function is the inverse of
1721 compcode_to_comparison. */
1723 static int
1724 comparison_to_compcode (code)
1725 enum tree_code code;
1727 switch (code)
1729 case LT_EXPR:
1730 return COMPCODE_LT;
1731 case EQ_EXPR:
1732 return COMPCODE_EQ;
1733 case LE_EXPR:
1734 return COMPCODE_LE;
1735 case GT_EXPR:
1736 return COMPCODE_GT;
1737 case NE_EXPR:
1738 return COMPCODE_NE;
1739 case GE_EXPR:
1740 return COMPCODE_GE;
1741 default:
1742 abort ();
1746 /* Convert a compcode bit-based encoding of a comparison operator back
1747 to GCC's enum tree_code representation. This function is the
1748 inverse of comparison_to_compcode. */
1750 static enum tree_code
1751 compcode_to_comparison (code)
1752 int code;
1754 switch (code)
1756 case COMPCODE_LT:
1757 return LT_EXPR;
1758 case COMPCODE_EQ:
1759 return EQ_EXPR;
1760 case COMPCODE_LE:
1761 return LE_EXPR;
1762 case COMPCODE_GT:
1763 return GT_EXPR;
1764 case COMPCODE_NE:
1765 return NE_EXPR;
1766 case COMPCODE_GE:
1767 return GE_EXPR;
1768 default:
1769 abort ();
1773 /* Return nonzero if CODE is a tree code that represents a truth value. */
1775 static int
1776 truth_value_p (code)
1777 enum tree_code code;
1779 return (TREE_CODE_CLASS (code) == '<'
1780 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1781 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1782 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1785 /* Return nonzero if two operands are necessarily equal.
1786 If ONLY_CONST is nonzero, only return nonzero for constants.
1787 This function tests whether the operands are indistinguishable;
1788 it does not test whether they are equal using C's == operation.
1789 The distinction is important for IEEE floating point, because
1790 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1791 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
/* NOTE(review): the "int" return-type line appears to have been dropped
   by the extraction; per the comment above, this returns a 0/1 truth
   value -- confirm against the original source.  */
1794 operand_equal_p (arg0, arg1, only_const)
1795 tree arg0, arg1;
1796 int only_const;
1798 /* If both types don't have the same signedness, then we can't consider
1799 them equal. We must check this before the STRIP_NOPS calls
1800 because they may change the signedness of the arguments. */
1801 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1802 return 0;
1804 STRIP_NOPS (arg0);
1805 STRIP_NOPS (arg1);
1807 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1808 /* This is needed for conversions and for COMPONENT_REF.
1809 Might as well play it safe and always test this. */
1810 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1811 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1812 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1813 return 0;
1815 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1816 We don't care about side effects in that case because the SAVE_EXPR
1817 takes care of that for us. In all other cases, two expressions are
1818 equal if they have no side effects. If we have two identical
1819 expressions with side effects that should be treated the same due
1820 to the only side effects being identical SAVE_EXPR's, that will
1821 be detected in the recursive calls below. */
1822 if (arg0 == arg1 && ! only_const
1823 && (TREE_CODE (arg0) == SAVE_EXPR
1824 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1825 return 1;
1827 /* Next handle constant cases, those for which we can return 1 even
1828 if ONLY_CONST is set. */
1829 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1830 switch (TREE_CODE (arg0))
1832 case INTEGER_CST:
1833 return (! TREE_CONSTANT_OVERFLOW (arg0)
1834 && ! TREE_CONSTANT_OVERFLOW (arg1)
1835 && tree_int_cst_equal (arg0, arg1));
1837 case REAL_CST:
1838 return (! TREE_CONSTANT_OVERFLOW (arg0)
1839 && ! TREE_CONSTANT_OVERFLOW (arg1)
1840 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1841 TREE_REAL_CST (arg1)));
1843 case VECTOR_CST:
/* Walk both element chains in lockstep; every element must match.  */
1845 tree v1, v2;
1847 if (TREE_CONSTANT_OVERFLOW (arg0)
1848 || TREE_CONSTANT_OVERFLOW (arg1))
1849 return 0;
1851 v1 = TREE_VECTOR_CST_ELTS (arg0);
1852 v2 = TREE_VECTOR_CST_ELTS (arg1);
1853 while (v1 && v2)
1855 if (!operand_equal_p (v1, v2, only_const))
1856 return 0;
1857 v1 = TREE_CHAIN (v1);
1858 v2 = TREE_CHAIN (v2);
1861 return 1;
1864 case COMPLEX_CST:
1865 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1866 only_const)
1867 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1868 only_const));
1870 case STRING_CST:
1871 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1872 && ! memcmp (TREE_STRING_POINTER (arg0),
1873 TREE_STRING_POINTER (arg1),
1874 TREE_STRING_LENGTH (arg0)));
1876 case ADDR_EXPR:
/* NOTE(review): the final argument line of this call (original line
   1878) appears to have been dropped by the extraction.  */
1877 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1879 default:
1880 break;
1883 if (only_const)
1884 return 0;
1886 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1888 case '1':
1889 /* Two conversions are equal only if signedness and modes match. */
1890 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1891 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1892 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1893 return 0;
1895 return operand_equal_p (TREE_OPERAND (arg0, 0),
1896 TREE_OPERAND (arg1, 0), 0);
1898 case '<':
1899 case '2':
1900 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1901 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1903 return 1;
1905 /* For commutative ops, allow the other order. */
1906 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1907 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1908 || TREE_CODE (arg0) == BIT_IOR_EXPR
1909 || TREE_CODE (arg0) == BIT_XOR_EXPR
1910 || TREE_CODE (arg0) == BIT_AND_EXPR
1911 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1912 && operand_equal_p (TREE_OPERAND (arg0, 0),
1913 TREE_OPERAND (arg1, 1), 0)
1914 && operand_equal_p (TREE_OPERAND (arg0, 1),
1915 TREE_OPERAND (arg1, 0), 0));
1917 case 'r':
1918 /* If either of the pointer (or reference) expressions we are dereferencing
1919 contain a side effect, these cannot be equal. */
1920 if (TREE_SIDE_EFFECTS (arg0)
1921 || TREE_SIDE_EFFECTS (arg1))
1922 return 0;
1924 switch (TREE_CODE (arg0))
1926 case INDIRECT_REF:
1927 return operand_equal_p (TREE_OPERAND (arg0, 0),
1928 TREE_OPERAND (arg1, 0), 0);
1930 case COMPONENT_REF:
1931 case ARRAY_REF:
1932 case ARRAY_RANGE_REF:
1933 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1934 TREE_OPERAND (arg1, 0), 0)
1935 && operand_equal_p (TREE_OPERAND (arg0, 1),
1936 TREE_OPERAND (arg1, 1), 0));
1938 case BIT_FIELD_REF:
1939 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1940 TREE_OPERAND (arg1, 0), 0)
1941 && operand_equal_p (TREE_OPERAND (arg0, 1),
1942 TREE_OPERAND (arg1, 1), 0)
1943 && operand_equal_p (TREE_OPERAND (arg0, 2),
1944 TREE_OPERAND (arg1, 2), 0));
1945 default:
1946 return 0;
1949 case 'e':
1950 if (TREE_CODE (arg0) == RTL_EXPR)
1951 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1952 return 0;
1954 default:
1955 return 0;
1959 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1960 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1962 When in doubt, return 0. */
1964 static int
1965 operand_equal_for_comparison_p (arg0, arg1, other)
1966 tree arg0, arg1;
1967 tree other;
/* Returns 1 if ARG0 could be a shortened form of ARG1 compared against
   OTHER; 0 when in doubt (see comment above).  */
1969 int unsignedp1, unsignedpo;
1970 tree primarg0, primarg1, primother;
1971 unsigned int correct_width;
1973 if (operand_equal_p (arg0, arg1, 0))
1974 return 1;
1976 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1977 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1978 return 0;
1980 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1981 and see if the inner values are the same. This removes any
1982 signedness comparison, which doesn't matter here. */
1983 primarg0 = arg0, primarg1 = arg1;
1984 STRIP_NOPS (primarg0);
1985 STRIP_NOPS (primarg1);
1986 if (operand_equal_p (primarg0, primarg1, 0))
1987 return 1;
1989 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1990 actual comparison operand, ARG0.
1992 First throw away any conversions to wider types
1993 already present in the operands. */
1995 primarg1 = get_narrower (arg1, &unsignedp1);
1996 primother = get_narrower (other, &unsignedpo);
1998 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1999 if (unsignedp1 == unsignedpo
2000 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2001 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2003 tree type = TREE_TYPE (arg0);
2005 /* Make sure shorter operand is extended the right way
2006 to match the longer operand. */
2007 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2008 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2010 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2011 return 1;
2014 return 0;
2017 /* See if ARG is an expression that is either a comparison or is performing
2018 arithmetic on comparisons. The comparisons must only be comparing
2019 two different values, which will be stored in *CVAL1 and *CVAL2; if
2020 they are nonzero it means that some operands have already been found.
2021 No variables may be used anywhere else in the expression except in the
2022 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2023 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2025 If this is true, return 1. Otherwise, return zero. */
2027 static int
2028 twoval_comparison_p (arg, cval1, cval2, save_p)
2029 tree arg;
2030 tree *cval1, *cval2;
2031 int *save_p;
/* Recursively scan ARG, collecting the (at most two) distinct operands
   of its comparisons into *CVAL1/*CVAL2.  */
2033 enum tree_code code = TREE_CODE (arg);
2034 char class = TREE_CODE_CLASS (code);
2036 /* We can handle some of the 'e' cases here. */
2037 if (class == 'e' && code == TRUTH_NOT_EXPR)
2038 class = '1';
2039 else if (class == 'e'
2040 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2041 || code == COMPOUND_EXPR))
2042 class = '2';
2044 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2045 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2047 /* If we've already found a CVAL1 or CVAL2, this expression is
2048 too complex to handle. */
2049 if (*cval1 || *cval2)
2050 return 0;
2052 class = '1';
2053 *save_p = 1;
2056 switch (class)
2058 case '1':
2059 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2061 case '2':
2062 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2063 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2064 cval1, cval2, save_p));
2066 case 'c':
/* A constant is trivially acceptable.  */
2067 return 1;
2069 case 'e':
2070 if (code == COND_EXPR)
2071 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2072 cval1, cval2, save_p)
2073 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2074 cval1, cval2, save_p)
2075 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2076 cval1, cval2, save_p));
2077 return 0;
2079 case '<':
2080 /* First see if we can handle the first operand, then the second. For
2081 the second operand, we know *CVAL1 can't be zero. It must be that
2082 one side of the comparison is each of the values; test for the
2083 case where this isn't true by failing if the two operands
2084 are the same. */
2086 if (operand_equal_p (TREE_OPERAND (arg, 0),
2087 TREE_OPERAND (arg, 1), 0))
2088 return 0;
2090 if (*cval1 == 0)
2091 *cval1 = TREE_OPERAND (arg, 0);
2092 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2094 else if (*cval2 == 0)
2095 *cval2 = TREE_OPERAND (arg, 0);
2096 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2098 else
2099 return 0;
2101 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2103 else if (*cval2 == 0)
2104 *cval2 = TREE_OPERAND (arg, 1);
2105 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2107 else
2108 return 0;
2110 return 1;
2112 default:
2113 return 0;
2117 /* ARG is a tree that is known to contain just arithmetic operations and
2118 comparisons. Evaluate the operations in the tree substituting NEW0 for
2119 any occurrence of OLD0 as an operand of a comparison and likewise for
2120 NEW1 and OLD1. */
2122 static tree
2123 eval_subst (arg, old0, new0, old1, new1)
2124 tree arg;
2125 tree old0, new0, old1, new1;
/* Rebuild ARG with NEW0/NEW1 substituted for OLD0/OLD1 wherever they
   appear as comparison operands, folding as we go.  */
2127 tree type = TREE_TYPE (arg);
2128 enum tree_code code = TREE_CODE (arg);
2129 char class = TREE_CODE_CLASS (code);
2131 /* We can handle some of the 'e' cases here. */
2132 if (class == 'e' && code == TRUTH_NOT_EXPR)
2133 class = '1';
2134 else if (class == 'e'
2135 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2136 class = '2';
2138 switch (class)
2140 case '1':
2141 return fold (build1 (code, type,
2142 eval_subst (TREE_OPERAND (arg, 0),
2143 old0, new0, old1, new1)));
2145 case '2':
2146 return fold (build (code, type,
2147 eval_subst (TREE_OPERAND (arg, 0),
2148 old0, new0, old1, new1),
2149 eval_subst (TREE_OPERAND (arg, 1),
2150 old0, new0, old1, new1)));
2152 case 'e':
2153 switch (code)
2155 case SAVE_EXPR:
2156 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2158 case COMPOUND_EXPR:
/* Only the second operand of a COMPOUND_EXPR yields the value.  */
2159 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2161 case COND_EXPR:
2162 return fold (build (code, type,
2163 eval_subst (TREE_OPERAND (arg, 0),
2164 old0, new0, old1, new1),
2165 eval_subst (TREE_OPERAND (arg, 1),
2166 old0, new0, old1, new1),
2167 eval_subst (TREE_OPERAND (arg, 2),
2168 old0, new0, old1, new1)));
2169 default:
2170 break;
2172 /* fall through - ??? */
2174 case '<':
2176 tree arg0 = TREE_OPERAND (arg, 0);
2177 tree arg1 = TREE_OPERAND (arg, 1);
2179 /* We need to check both for exact equality and tree equality. The
2180 former will be true if the operand has a side-effect. In that
2181 case, we know the operand occurred exactly once. */
2183 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2184 arg0 = new0;
2185 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2186 arg0 = new1;
2188 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2189 arg1 = new0;
2190 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2191 arg1 = new1;
2193 return fold (build (code, type, arg0, arg1));
2196 default:
/* Anything else is returned unchanged.  */
2197 return arg;
2201 /* Return a tree for the case when the result of an expression is RESULT
2202 converted to TYPE and OMITTED was previously an operand of the expression
2203 but is now not needed (e.g., we folded OMITTED * 0).
2205 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2206 the conversion of RESULT to TYPE. */
2208 static tree
2209 omit_one_operand (type, result, omitted)
2210 tree type, result, omitted;
2212 tree t = convert (type, result);
2214 if (TREE_SIDE_EFFECTS (omitted))
2215 return build (COMPOUND_EXPR, type, omitted, t);
2217 return non_lvalue (t);
2220 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2222 static tree
2223 pedantic_omit_one_operand (type, result, omitted)
2224 tree type, result, omitted;
2226 tree t = convert (type, result);
2228 if (TREE_SIDE_EFFECTS (omitted))
2229 return build (COMPOUND_EXPR, type, omitted, t);
2231 return pedantic_non_lvalue (t);
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* Propagate errors unchanged.  */
  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_unsafe_math_optimizations
	  && code != NE_EXPR
	  && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* !c is 1 when c is zero, 0 otherwise.  */
      return convert (type, build_int_2 (integer_zerop (arg), 0));

    /* De Morgan: !(a & b) == !a | !b, and dually for OR.  */
    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    /* De Morgan also holds for the short-circuit forms.  */
    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Push the negation into both arms; the condition is untouched.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the value operand (operand 1) is negated.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case WITH_RECORD_EXPR:
      return build (WITH_RECORD_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Negate inside the conversion, keeping the conversion itself.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) == ((x & 1) == 0); only valid for a mask of 1.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      /* Wrap rather than rewrite the saved expression.  */
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything else must already be a boolean-typed truth value; wrap it.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (code, type, arg0, arg1)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
{
  tree common;
  tree left, right;

  /* Both operands must use the same inner code, that code must differ
     from CODE (otherwise nothing is saved), and it must be AND or IOR.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  /* Try all four pairings of operands to find the shared input; LEFT and
     RIGHT pick up the two non-shared operands.  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  /* Rebuild as COMMON <inner-op> (LEFT <code> RIGHT).  */
  return fold (build (TREE_CODE (arg0), type, common,
		      fold (build (code, type, left, right))));
}
2404 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2405 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2407 static tree
2408 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2409 tree inner;
2410 tree type;
2411 int bitsize, bitpos;
2412 int unsignedp;
2414 tree result = build (BIT_FIELD_REF, type, inner,
2415 size_int (bitsize), bitsize_int (bitpos));
2417 TREE_UNSIGNED (result) = unsignedp;
2419 return result;
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     if the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  Start with all
     ones, then shift left and back right so only LBITSIZE ones remain, in
     position LBITPOS.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     nbitsize, nbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      /* Preserve volatility of the original reference.  */
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     HOST_WIDE_INT *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an explicit AND with a constant mask, remembering the mask.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep);
  /* Give up when nothing was peeled and no mask was found, when the
     reference has a variable offset, or when replacing the inner
     expression would be unsafe (PLACEHOLDER_EXPR).  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Shift an all-ones constant left then right so only *PBITSIZE
     low-order one bits remain.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2678 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2679 bit positions. */
2681 static int
2682 all_ones_mask_p (mask, size)
2683 tree mask;
2684 int size;
2686 tree type = TREE_TYPE (mask);
2687 unsigned int precision = TYPE_PRECISION (type);
2688 tree tmask;
2690 tmask = build_int_2 (~0, ~0);
2691 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2692 force_fit_type (tmask, 0);
2693 return
2694 tree_int_cst_equal (mask,
2695 const_binop (RSHIFT_EXPR,
2696 const_binop (LSHIFT_EXPR, tmask,
2697 size_int (precision - size),
2699 size_int (precision - size), 0));
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (exp, val)
     tree exp;
     tree val;
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  /* Compute the sign-bit constant as a (hi, lo) doubleword pair; the
     bit lands in HI or LO depending on whether the precision exceeds
     one host wide int.  */
  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
    }

  if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
2751 /* Subroutine for fold_truthop: determine if an operand is simple enough
2752 to be evaluated unconditionally. */
2754 static int
2755 simple_operand_p (exp)
2756 tree exp;
2758 /* Strip any conversions that don't change the machine mode. */
2759 while ((TREE_CODE (exp) == NOP_EXPR
2760 || TREE_CODE (exp) == CONVERT_EXPR)
2761 && (TYPE_MODE (TREE_TYPE (exp))
2762 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2763 exp = TREE_OPERAND (exp, 0);
2765 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2766 || (DECL_P (exp)
2767 && ! TREE_ADDRESSABLE (exp)
2768 && ! TREE_THIS_VOLATILE (exp)
2769 && ! DECL_NONLOCAL (exp)
2770 /* Don't regard global variables as simple. They may be
2771 allocated in ways unknown to the compiler (shared memory,
2772 #pragma weak, etc). */
2773 && ! TREE_PUBLIC (exp)
2774 && ! DECL_EXTERNAL (exp)
2775 /* Loading a static variable is unduly expensive, but global
2776 registers aren't expensive. */
2777 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2780 /* The following functions are subroutines to fold_range_test and allow it to
2781 try to change a logical combination of comparisons into a range test.
   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
2787 are converted to
2788 (unsigned) (X - 2) <= 3
2790 We describe each set of comparisons as being either inside or outside
2791 a range, using a variable named like IN_P, and then describe the
2792 range with a lower and upper bound. If one of the bounds is omitted,
2793 it represents either the highest or lowest value of the type.
2795 In the comments below, we represent a range by two numbers in brackets
2796 preceded by a "+" to designate being inside that range, or a "-" to
2797 designate being outside that range, so the condition can be inverted by
2798 flipping the prefix. An omitted bound is represented by a "-". For
2799 example, "- [-, 10]" means being outside the range starting at the lowest
2800 possible value and ending at 10, in other words, being greater than 10.
2801 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2802 always false.
2804 We set up things so that the missing bounds are handled in a consistent
2805 manner so neither a missing bound nor "true" and "false" need to be
2806 handled using a special case. */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
     int upper0_p, upper1_p;
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules. Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, convert (TREE_TYPE (arg0), arg1)));
      STRIP_NOPS (tem);
      /* Only a constant result is useful; anything else counts as
	 "could not evaluate".  */
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != '<')
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same. But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number. This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      abort ();
    }

  return convert (type, result ? integer_one_node : integer_zero_node);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  /* Pick up the operands we may look at below; ARG1 only exists
	     for codes with at least two operands.  */
	  arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation flips inside/outside and recurses on the operand.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we
		 have a low bound, reverse the range so
		 it goes from zero to the low bound minus 1.  */
	      if (high == 0 && low)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* Never widen past the type the range was first computed in.  */
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = (*lang_hooks.types.type_for_mode)
		(TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (type);

	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
		high_positive = fold (build (RSHIFT_EXPR, type,
					     convert (type, high_positive),
					     convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  */

static tree
build_range_check (type, exp, in_p, low, high)
     tree type;
     tree exp;
     int in_p;
     tree low, high;
{
  tree etype = TREE_TYPE (exp);
  tree value;

  /* An "out of range" test is the inversion of the corresponding
     "in range" test.  */
  if (! in_p
      && (0 != (value = build_range_check (type, exp, 1, low, high))))
    return invert_truthvalue (value);

  /* No bounds at all: always true.  */
  if (low == 0 && high == 0)
    return convert (type, integer_one_node);

  if (low == 0)
    return fold (build (LE_EXPR, type, exp, high));

  if (high == 0)
    return fold (build (GE_EXPR, type, exp, low));

  /* A degenerate range [c, c] is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold (build (EQ_EXPR, type, exp, low));

  /* [0, high]: redo as an unsigned comparison so a single LE suffices.  */
  if (integer_zerop (low))
    {
      if (! TREE_UNSIGNED (etype))
	{
	  etype = (*lang_hooks.types.unsigned_type) (etype);
	  high = convert (etype, high);
	  exp = convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the maximum of the signed variant of ETYPE as a
	 (hi, lo) doubleword pair and compare against HIGH.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TREE_UNSIGNED (etype))
	    {
	      etype = (*lang_hooks.types.signed_type) (etype);
	      exp = convert (etype, exp);
	    }
	  return fold (build (GT_EXPR, type, exp,
			      convert (etype, integer_zero_node)));
	}
    }

  /* General case: shift the range to start at zero (EXP - LOW) and
     recurse, provided HIGH - LOW does not overflow.  */
  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
      && ! TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold (build (MINUS_EXPR, etype, exp, low)),
			      1, convert (etype, integer_zero_node), value);

  return 0;
}
3228 /* Given two ranges, see if we can merge them into one.  Return 1 if we
3229 can, 0 if we can't.  Set the output range into the specified parameters.
     The in/out flags (IN0_P, IN1_P, *PIN_P) say whether the associated
     bounds describe values inside or outside the [low, high] interval;
     a null bound means the range is unbounded on that end.  */
3231 static int
3232 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3233 int *pin_p;
3234 tree *plow, *phigh;
3235 int in0_p, in1_p;
3236 tree low0, high0, low1, high1;
3238 int no_overlap;
3239 int subset;
3240 int temp;
3241 tree tem;
3242 int in_p;
3243 tree low, high;
     /* NOTE(review): the 0/1 flag following each bound passed to
        range_binop marks it as a lower (0) or upper (1) bound --
        presumably so that a null bound compares as -infinity or
        +infinity respectively; confirm against range_binop.  */
3244 int lowequal = ((low0 == 0 && low1 == 0)
3245 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3246 low0, 0, low1, 0)));
3247 int highequal = ((high0 == 0 && high1 == 0)
3248 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3249 high0, 1, high1, 1)));
3251 /* Make range 0 be the range that starts first, or ends last if they
3252 start at the same value.  Swap them if it isn't.  */
3253 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3254 low0, 0, low1, 0))
3255 || (lowequal
3256 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3257 high1, 1, high0, 1))))
3259 temp = in0_p, in0_p = in1_p, in1_p = temp;
3260 tem = low0, low0 = low1, low1 = tem;
3261 tem = high0, high0 = high1, high1 = tem;
3264 /* Now flag two cases, whether the ranges are disjoint or whether the
3265 second range is totally subsumed in the first.  Note that the tests
3266 below are simplified by the ones above.  */
3267 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3268 high0, 1, low1, 0));
3269 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3270 high1, 1, high0, 1));
3272 /* We now have four cases, depending on whether we are including or
3273 excluding the two ranges.  */
3274 if (in0_p && in1_p)
3276 /* If they don't overlap, the result is false.  If the second range
3277 is a subset it is the result.  Otherwise, the range is from the start
3278 of the second to the end of the first.  */
3279 if (no_overlap)
3280 in_p = 0, low = high = 0;
3281 else if (subset)
3282 in_p = 1, low = low1, high = high1;
3283 else
3284 in_p = 1, low = low1, high = high0;
3287 else if (in0_p && ! in1_p)
3289 /* If they don't overlap, the result is the first range.  If they are
3290 equal, the result is false.  If the second range is a subset of the
3291 first, and the ranges begin at the same place, we go from just after
3292 the end of the first range to the end of the second.  If the second
3293 range is not a subset of the first, or if it is a subset and both
3294 ranges end at the same place, the range starts at the start of the
3295 first range and ends just before the second range.
3296 Otherwise, we can't describe this as a single range.  */
3297 if (no_overlap)
3298 in_p = 1, low = low0, high = high0;
3299 else if (lowequal && highequal)
3300 in_p = 0, low = high = 0;
3301 else if (subset && lowequal)
3303 in_p = 1, high = high0;
3304 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3305 integer_one_node, 0);
3307 else if (! subset || highequal)
3309 in_p = 1, low = low0;
3310 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3311 integer_one_node, 0);
3313 else
3314 return 0;
3317 else if (! in0_p && in1_p)
3319 /* If they don't overlap, the result is the second range.  If the second
3320 is a subset of the first, the result is false.  Otherwise,
3321 the range starts just after the first range and ends at the
3322 end of the second.  */
3323 if (no_overlap)
3324 in_p = 1, low = low1, high = high1;
3325 else if (subset || highequal)
3326 in_p = 0, low = high = 0;
3327 else
3329 in_p = 1, high = high1;
3330 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3331 integer_one_node, 0);
3335 else
3337 /* The case where we are excluding both ranges.  Here the complex case
3338 is if they don't overlap.  In that case, the only time we have a
3339 range is if they are adjacent.  If the second is a subset of the
3340 first, the result is the first.  Otherwise, the range to exclude
3341 starts at the beginning of the first range and ends at the end of the
3342 second.  */
3343 if (no_overlap)
     /* Adjacency test: HIGH0 + 1 == LOW1 means the two excluded
        intervals butt together and can be excluded as one.  */
3345 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3346 range_binop (PLUS_EXPR, NULL_TREE,
3347 high0, 1,
3348 integer_one_node, 1),
3349 1, low1, 0)))
3350 in_p = 0, low = low0, high = high1;
3351 else
3352 return 0;
3354 else if (subset)
3355 in_p = 0, low = low0, high = high0;
3356 else
3357 in_p = 0, low = low0, high = high1;
3360 *pin_p = in_p, *plow = low, *phigh = high;
3361 return 1;
3364 /* EXP is some logical combination of boolean tests.  See if we can
3365 merge it into some range test.  Return the new tree if so.
     Returns 0 when neither the range-merge nor the non-short-circuit
     rewrite applies.  */
3367 static tree
3368 fold_range_test (exp)
3369 tree exp;
3371 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3372 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3373 int in0_p, in1_p, in_p;
3374 tree low0, low1, low, high0, high1, high;
3375 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3376 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3377 tree tem;
3379 /* If this is an OR operation, invert both sides; we will invert
3380 again at the end.  */
3381 if (or_op)
3382 in0_p = ! in0_p, in1_p = ! in1_p;
3384 /* If both expressions are the same, if we can merge the ranges, and we
3385 can build the range test, return it or it inverted.  If one of the
3386 ranges is always true or always false, consider it to be the same
3387 expression as the other.  */
3388 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3389 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3390 in1_p, low1, high1)
3391 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3392 lhs != 0 ? lhs
3393 : rhs != 0 ? rhs : integer_zero_node,
3394 in_p, low, high))))
3395 return or_op ? invert_truthvalue (tem) : tem;
3397 /* On machines where the branch cost is expensive, if this is a
3398 short-circuited branch and the underlying object on both sides
3399 is the same, make a non-short-circuit operation.  */
3400 else if (BRANCH_COST >= 2
3401 && lhs != 0 && rhs != 0
3402 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3403 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3404 && operand_equal_p (lhs, rhs, 0))
3406 /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
3407 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3408 which cases we can't do this.  */
3409 if (simple_operand_p (lhs))
3410 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3412 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3413 TREE_OPERAND (exp, 1));
3415 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3416 && ! contains_placeholder_p (lhs))
     /* Wrap the shared operand in a SAVE_EXPR so it is evaluated only
        once even though it appears in both halves of the rewrite.  */
3418 tree common = save_expr (lhs);
3420 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3421 or_op ? ! in0_p : in0_p,
3422 low0, high0))
3423 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3424 or_op ? ! in1_p : in1_p,
3425 low1, high1))))
3426 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3427 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3428 TREE_TYPE (exp), lhs, rhs);
3432 return 0;
3435 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3436 bit value. Arrange things so the extra bits will be set to zero if and
3437 only if C is signed-extended to its full width. If MASK is nonzero,
3438 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3440 static tree
3441 unextend (c, p, unsignedp, mask)
3442 tree c;
3443 int p;
3444 int unsignedp;
3445 tree mask;
3447 tree type = TREE_TYPE (c);
3448 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3449 tree temp;
3451 if (p == modesize || unsignedp)
3452 return c;
3454 /* We work by getting just the sign bit into the low-order bit, then
3455 into the high-order bit, then sign-extend. We then XOR that value
3456 with C. */
3457 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3458 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3460 /* We must use a signed type in order to get an arithmetic right shift.
3461 However, we must also avoid introducing accidental overflows, so that
3462 a subsequent call to integer_zerop will work. Hence we must
3463 do the type conversion here. At this point, the constant is either
3464 zero or one, and the conversion to a signed type can never overflow.
3465 We could get an overflow if this conversion is done anywhere else. */
3466 if (TREE_UNSIGNED (type))
3467 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3469 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3470 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3471 if (mask != 0)
3472 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3473 /* If necessary, convert the type back to match the type of C. */
3474 if (TREE_UNSIGNED (type))
3475 temp = convert (type, temp);
3477 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3480 /* Find ways of folding logical expressions of LHS and RHS:
3481 Try to merge two comparisons to the same innermost item.
3482 Look for range tests like "ch >= '0' && ch <= '9'".
3483 Look for combinations of simple terms on machines with expensive branches
3484 and evaluate the RHS unconditionally.
3486 For example, if we have p->a == 2 && p->b == 4 and we can make an
3487 object large enough to span both A and B, we can do this with a comparison
3488 against the object ANDed with the a mask.
3490 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3491 operations to do this with one comparison.
3493 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3494 function and the one above.
3496 CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
3497 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3499 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3500 two operands.
3502 We return the simplified tree or 0 if no optimization is possible.  */
3504 static tree
3505 fold_truthop (code, truth_type, lhs, rhs)
3506 enum tree_code code;
3507 tree truth_type, lhs, rhs;
3509 /* If this is the "or" of two comparisons, we can do something if
3510 the comparisons are NE_EXPR.  If this is the "and", we can do something
3511 if the comparisons are EQ_EXPR.  I.e.,
3512 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3514 WANTED_CODE is this operation code.  For single bit fields, we can
3515 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3516 comparison for one-bit fields.  */
3518 enum tree_code wanted_code;
3519 enum tree_code lcode, rcode;
3520 tree ll_arg, lr_arg, rl_arg, rr_arg;
3521 tree ll_inner, lr_inner, rl_inner, rr_inner;
3522 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3523 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3524 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3525 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3526 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3527 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3528 enum machine_mode lnmode, rnmode;
3529 tree ll_mask, lr_mask, rl_mask, rr_mask;
3530 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3531 tree l_const, r_const;
3532 tree lntype, rntype, result;
3533 int first_bit, end_bit;
3534 int volatilep;
3536 /* Start by getting the comparison codes.  Fail if anything is volatile.
3537 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3538 it were surrounded with a NE_EXPR.  */
3540 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3541 return 0;
3543 lcode = TREE_CODE (lhs);
3544 rcode = TREE_CODE (rhs);
3546 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3547 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3549 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3550 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3552 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3553 return 0;
     /* Canonicalize to the unconditional AND/OR form; both operands were
        just verified to have no side effects, so short-circuit
        evaluation is not required.  */
3555 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3556 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3558 ll_arg = TREE_OPERAND (lhs, 0);
3559 lr_arg = TREE_OPERAND (lhs, 1);
3560 rl_arg = TREE_OPERAND (rhs, 0);
3561 rr_arg = TREE_OPERAND (rhs, 1);
3563 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
3564 if (simple_operand_p (ll_arg)
3565 && simple_operand_p (lr_arg)
3566 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3568 int compcode;
3570 if (operand_equal_p (ll_arg, rl_arg, 0)
3571 && operand_equal_p (lr_arg, rr_arg, 0))
3573 int lcompcode, rcompcode;
3575 lcompcode = comparison_to_compcode (lcode);
3576 rcompcode = comparison_to_compcode (rcode);
     /* Compcodes are bitmasks, so AND/OR of the tests becomes
        bitwise AND/OR of the codes.  */
3577 compcode = (code == TRUTH_AND_EXPR)
3578 ? lcompcode & rcompcode
3579 : lcompcode | rcompcode;
3581 else if (operand_equal_p (ll_arg, rr_arg, 0)
3582 && operand_equal_p (lr_arg, rl_arg, 0))
3584 int lcompcode, rcompcode;
3586 rcode = swap_tree_comparison (rcode);
3587 lcompcode = comparison_to_compcode (lcode);
3588 rcompcode = comparison_to_compcode (rcode);
3589 compcode = (code == TRUTH_AND_EXPR)
3590 ? lcompcode & rcompcode
3591 : lcompcode | rcompcode;
3593 else
3594 compcode = -1;
3596 if (compcode == COMPCODE_TRUE)
3597 return convert (truth_type, integer_one_node);
3598 else if (compcode == COMPCODE_FALSE)
3599 return convert (truth_type, integer_zero_node);
3600 else if (compcode != -1)
3601 return build (compcode_to_comparison (compcode),
3602 truth_type, ll_arg, lr_arg);
3605 /* If the RHS can be evaluated unconditionally and its operands are
3606 simple, it wins to evaluate the RHS unconditionally on machines
3607 with expensive branches.  In this case, this isn't a comparison
3608 that can be merged.  Avoid doing this if the RHS is a floating-point
3609 comparison since those can trap.  */
3611 if (BRANCH_COST >= 2
3612 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3613 && simple_operand_p (rl_arg)
3614 && simple_operand_p (rr_arg))
3616 /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
3617 if (code == TRUTH_OR_EXPR
3618 && lcode == NE_EXPR && integer_zerop (lr_arg)
3619 && rcode == NE_EXPR && integer_zerop (rr_arg)
3620 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3621 return build (NE_EXPR, truth_type,
3622 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3623 ll_arg, rl_arg),
3624 integer_zero_node);
3626 /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
3627 if (code == TRUTH_AND_EXPR
3628 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3629 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3630 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3631 return build (EQ_EXPR, truth_type,
3632 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3633 ll_arg, rl_arg),
3634 integer_zero_node);
3636 return build (code, truth_type, lhs, rhs);
3639 /* See if the comparisons can be merged.  Then get all the parameters for
3640 each side.  */
3642 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3643 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3644 return 0;
3646 volatilep = 0;
3647 ll_inner = decode_field_reference (ll_arg,
3648 &ll_bitsize, &ll_bitpos, &ll_mode,
3649 &ll_unsignedp, &volatilep, &ll_mask,
3650 &ll_and_mask);
3651 lr_inner = decode_field_reference (lr_arg,
3652 &lr_bitsize, &lr_bitpos, &lr_mode,
3653 &lr_unsignedp, &volatilep, &lr_mask,
3654 &lr_and_mask);
3655 rl_inner = decode_field_reference (rl_arg,
3656 &rl_bitsize, &rl_bitpos, &rl_mode,
3657 &rl_unsignedp, &volatilep, &rl_mask,
3658 &rl_and_mask);
3659 rr_inner = decode_field_reference (rr_arg,
3660 &rr_bitsize, &rr_bitpos, &rr_mode,
3661 &rr_unsignedp, &volatilep, &rr_mask,
3662 &rr_and_mask);
3664 /* It must be true that the inner operation on the lhs of each
3665 comparison must be the same if we are to be able to do anything.
3666 Then see if we have constants.  If not, the same must be true for
3667 the rhs's.  */
3668 if (volatilep || ll_inner == 0 || rl_inner == 0
3669 || ! operand_equal_p (ll_inner, rl_inner, 0))
3670 return 0;
3672 if (TREE_CODE (lr_arg) == INTEGER_CST
3673 && TREE_CODE (rr_arg) == INTEGER_CST)
3674 l_const = lr_arg, r_const = rr_arg;
3675 else if (lr_inner == 0 || rr_inner == 0
3676 || ! operand_equal_p (lr_inner, rr_inner, 0))
3677 return 0;
3678 else
3679 l_const = r_const = 0;
3681 /* If either comparison code is not correct for our logical operation,
3682 fail.  However, we can convert a one-bit comparison against zero into
3683 the opposite comparison against that bit being set in the field.  */
3685 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3686 if (lcode != wanted_code)
3688 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3690 /* Make the left operand unsigned, since we are only interested
3691 in the value of one bit.  Otherwise we are doing the wrong
3692 thing below.  */
3693 ll_unsignedp = 1;
3694 l_const = ll_mask;
3696 else
3697 return 0;
3700 /* This is analogous to the code for l_const above.  */
3701 if (rcode != wanted_code)
3703 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3705 rl_unsignedp = 1;
3706 r_const = rl_mask;
3708 else
3709 return 0;
3712 /* After this point all optimizations will generate bit-field
3713 references, which we might not want.  */
3714 if (! (*lang_hooks.can_use_bit_fields_p) ())
3715 return 0;
3717 /* See if we can find a mode that contains both fields being compared on
3718 the left.  If we can't, fail.  Otherwise, update all constants and masks
3719 to be relative to a field of that size.  */
3720 first_bit = MIN (ll_bitpos, rl_bitpos);
3721 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3722 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3723 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3724 volatilep);
3725 if (lnmode == VOIDmode)
3726 return 0;
3728 lnbitsize = GET_MODE_BITSIZE (lnmode);
3729 lnbitpos = first_bit & ~ (lnbitsize - 1);
3730 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3731 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3733 if (BYTES_BIG_ENDIAN)
3735 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3736 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3739 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3740 size_int (xll_bitpos), 0);
3741 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3742 size_int (xrl_bitpos), 0);
3744 if (l_const)
3746 l_const = convert (lntype, l_const);
3747 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3748 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
     /* If the constant has bits outside the field's mask, the
        comparison can never vary: warn and fold to a constant.  */
3749 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3750 fold (build1 (BIT_NOT_EXPR,
3751 lntype, ll_mask)),
3752 0)))
3754 warning ("comparison is always %d", wanted_code == NE_EXPR);
3756 return convert (truth_type,
3757 wanted_code == NE_EXPR
3758 ? integer_one_node : integer_zero_node);
3761 if (r_const)
3763 r_const = convert (lntype, r_const);
3764 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3765 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3766 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3767 fold (build1 (BIT_NOT_EXPR,
3768 lntype, rl_mask)),
3769 0)))
3771 warning ("comparison is always %d", wanted_code == NE_EXPR);
3773 return convert (truth_type,
3774 wanted_code == NE_EXPR
3775 ? integer_one_node : integer_zero_node);
3779 /* If the right sides are not constant, do the same for it.  Also,
3780 disallow this optimization if a size or signedness mismatch occurs
3781 between the left and right sides.  */
3782 if (l_const == 0)
3784 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3785 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3786 /* Make sure the two fields on the right
3787 correspond to the left without being swapped.  */
3788 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3789 return 0;
3791 first_bit = MIN (lr_bitpos, rr_bitpos);
3792 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3793 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3794 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3795 volatilep);
3796 if (rnmode == VOIDmode)
3797 return 0;
3799 rnbitsize = GET_MODE_BITSIZE (rnmode);
3800 rnbitpos = first_bit & ~ (rnbitsize - 1);
3801 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3802 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3804 if (BYTES_BIG_ENDIAN)
3806 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3807 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3810 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3811 size_int (xlr_bitpos), 0);
3812 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3813 size_int (xrr_bitpos), 0);
3815 /* Make a mask that corresponds to both fields being compared.
3816 Do this for both items being compared.  If the operands are the
3817 same size and the bits being compared are in the same position
3818 then we can do this by masking both and comparing the masked
3819 results.  */
3820 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3821 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3822 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3824 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3825 ll_unsignedp || rl_unsignedp);
3826 if (! all_ones_mask_p (ll_mask, lnbitsize))
3827 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3829 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3830 lr_unsignedp || rr_unsignedp);
3831 if (! all_ones_mask_p (lr_mask, rnbitsize))
3832 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3834 return build (wanted_code, truth_type, lhs, rhs);
3837 /* There is still another way we can do something:  If both pairs of
3838 fields being compared are adjacent, we may be able to make a wider
3839 field containing them both.
3841 Note that we still must mask the lhs/rhs expressions.  Furthermore,
3842 the mask must be shifted to account for the shift done by
3843 make_bit_field_ref.  */
3844 if ((ll_bitsize + ll_bitpos == rl_bitpos
3845 && lr_bitsize + lr_bitpos == rr_bitpos)
3846 || (ll_bitpos == rl_bitpos + rl_bitsize
3847 && lr_bitpos == rr_bitpos + rr_bitsize))
3849 tree type;
3851 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3852 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3853 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3854 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3856 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3857 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3858 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3859 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3861 /* Convert to the smaller type before masking out unwanted bits.  */
3862 type = lntype;
3863 if (lntype != rntype)
3865 if (lnbitsize > rnbitsize)
3867 lhs = convert (rntype, lhs);
3868 ll_mask = convert (rntype, ll_mask);
3869 type = rntype;
3871 else if (lnbitsize < rnbitsize)
3873 rhs = convert (lntype, rhs);
3874 lr_mask = convert (lntype, lr_mask);
3875 type = lntype;
3879 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3880 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3882 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3883 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3885 return build (wanted_code, truth_type, lhs, rhs);
3888 return 0;
3891 /* Handle the case of comparisons with constants.  If there is something in
3892 common between the masks, those bits of the constants must be the same.
3893 If not, the condition is always false.  Test for this to avoid generating
3894 incorrect code below.  */
3895 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3896 if (! integer_zerop (result)
3897 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3898 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3900 if (wanted_code == NE_EXPR)
3902 warning ("`or' of unmatched not-equal tests is always 1");
3903 return convert (truth_type, integer_one_node);
3905 else
3907 warning ("`and' of mutually exclusive equal-tests is always 0");
3908 return convert (truth_type, integer_zero_node);
3912 /* Construct the expression we will return.  First get the component
3913 reference we will make.  Unless the mask is all ones the width of
3914 that field, perform the mask operation.  Then compare with the
3915 merged constant.  */
3916 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3917 ll_unsignedp || rl_unsignedp);
3919 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3920 if (! all_ones_mask_p (ll_mask, lnbitsize))
3921 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3923 return build (wanted_code, truth_type, result,
3924 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3927 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3928 constant. */
3930 static tree
3931 optimize_minmax_comparison (t)
3932 tree t;
3934 tree type = TREE_TYPE (t);
3935 tree arg0 = TREE_OPERAND (t, 0);
3936 enum tree_code op_code;
3937 tree comp_const = TREE_OPERAND (t, 1);
3938 tree minmax_const;
3939 int consts_equal, consts_lt;
3940 tree inner;
3942 STRIP_SIGN_NOPS (arg0);
3944 op_code = TREE_CODE (arg0);
3945 minmax_const = TREE_OPERAND (arg0, 1);
3946 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3947 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3948 inner = TREE_OPERAND (arg0, 0);
3950 /* If something does not permit us to optimize, return the original tree. */
3951 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3952 || TREE_CODE (comp_const) != INTEGER_CST
3953 || TREE_CONSTANT_OVERFLOW (comp_const)
3954 || TREE_CODE (minmax_const) != INTEGER_CST
3955 || TREE_CONSTANT_OVERFLOW (minmax_const))
3956 return t;
3958 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3959 and GT_EXPR, doing the rest with recursive calls using logical
3960 simplifications. */
3961 switch (TREE_CODE (t))
3963 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3964 return
3965 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3967 case GE_EXPR:
3968 return
3969 fold (build (TRUTH_ORIF_EXPR, type,
3970 optimize_minmax_comparison
3971 (build (EQ_EXPR, type, arg0, comp_const)),
3972 optimize_minmax_comparison
3973 (build (GT_EXPR, type, arg0, comp_const))));
3975 case EQ_EXPR:
3976 if (op_code == MAX_EXPR && consts_equal)
3977 /* MAX (X, 0) == 0 -> X <= 0 */
3978 return fold (build (LE_EXPR, type, inner, comp_const));
3980 else if (op_code == MAX_EXPR && consts_lt)
3981 /* MAX (X, 0) == 5 -> X == 5 */
3982 return fold (build (EQ_EXPR, type, inner, comp_const));
3984 else if (op_code == MAX_EXPR)
3985 /* MAX (X, 0) == -1 -> false */
3986 return omit_one_operand (type, integer_zero_node, inner);
3988 else if (consts_equal)
3989 /* MIN (X, 0) == 0 -> X >= 0 */
3990 return fold (build (GE_EXPR, type, inner, comp_const));
3992 else if (consts_lt)
3993 /* MIN (X, 0) == 5 -> false */
3994 return omit_one_operand (type, integer_zero_node, inner);
3996 else
3997 /* MIN (X, 0) == -1 -> X == -1 */
3998 return fold (build (EQ_EXPR, type, inner, comp_const));
4000 case GT_EXPR:
4001 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4002 /* MAX (X, 0) > 0 -> X > 0
4003 MAX (X, 0) > 5 -> X > 5 */
4004 return fold (build (GT_EXPR, type, inner, comp_const));
4006 else if (op_code == MAX_EXPR)
4007 /* MAX (X, 0) > -1 -> true */
4008 return omit_one_operand (type, integer_one_node, inner);
4010 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4011 /* MIN (X, 0) > 0 -> false
4012 MIN (X, 0) > 5 -> false */
4013 return omit_one_operand (type, integer_zero_node, inner);
4015 else
4016 /* MIN (X, 0) > -1 -> X > -1 */
4017 return fold (build (GT_EXPR, type, inner, comp_const));
4019 default:
4020 return t;
4024 /* T is an integer expression that is being multiplied, divided, or taken a
4025 modulus (CODE says which and what kind of divide or modulus) by a
4026 constant C. See if we can eliminate that operation by folding it with
4027 other operations already in T. WIDE_TYPE, if non-null, is a type that
4028 should be used for the computation if wider than our type.
4030 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4031 (X * 2) + (Y * 4). We must, however, be assured that either the original
4032 expression would not overflow or that overflow is undefined for the type
4033 in the language in question.
4035 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4036 the machine has a multiply-accumulate insn or that this is part of an
4037 addressing calculation.
4039 If we return a non-null expression, it is an equivalent form of the
4040 original computation, but need not be in the original type. */
4042 static tree
4043 extract_muldiv (t, c, code, wide_type)
4044 tree t;
4045 tree c;
4046 enum tree_code code;
4047 tree wide_type;
4049 tree type = TREE_TYPE (t);
4050 enum tree_code tcode = TREE_CODE (t);
4051 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4052 > GET_MODE_SIZE (TYPE_MODE (type)))
4053 ? wide_type : type);
4054 tree t1, t2;
4055 int same_p = tcode == code;
4056 tree op0 = NULL_TREE, op1 = NULL_TREE;
4058 /* Don't deal with constants of zero here; they confuse the code below. */
4059 if (integer_zerop (c))
4060 return NULL_TREE;
4062 if (TREE_CODE_CLASS (tcode) == '1')
4063 op0 = TREE_OPERAND (t, 0);
4065 if (TREE_CODE_CLASS (tcode) == '2')
4066 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4068 /* Note that we need not handle conditional operations here since fold
4069 already handles those cases. So just do arithmetic here. */
4070 switch (tcode)
4072 case INTEGER_CST:
4073 /* For a constant, we can always simplify if we are a multiply
4074 or (for divide and modulus) if it is a multiple of our constant. */
4075 if (code == MULT_EXPR
4076 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4077 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4078 break;
4080 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4081 /* If op0 is an expression ... */
4082 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4083 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4084 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4085 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4086 /* ... and is unsigned, and its type is smaller than ctype,
4087 then we cannot pass through as widening. */
4088 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4089 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4090 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4091 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4092 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4093 /* ... or its type is larger than ctype,
4094 then we cannot pass through this truncation. */
4095 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4096 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))))
4097 break;
4099 /* Pass the constant down and see if we can make a simplification. If
4100 we can, replace this expression with the inner simplification for
4101 possible later conversion to our or some other type. */
4102 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4103 code == MULT_EXPR ? ctype : NULL_TREE)))
4104 return t1;
4105 break;
4107 case NEGATE_EXPR: case ABS_EXPR:
4108 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4109 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4110 break;
4112 case MIN_EXPR: case MAX_EXPR:
4113 /* If widening the type changes the signedness, then we can't perform
4114 this optimization as that changes the result. */
4115 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4116 break;
4118 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4119 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4120 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4122 if (tree_int_cst_sgn (c) < 0)
4123 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4125 return fold (build (tcode, ctype, convert (ctype, t1),
4126 convert (ctype, t2)));
4128 break;
4130 case WITH_RECORD_EXPR:
4131 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4132 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4133 TREE_OPERAND (t, 1));
4134 break;
4136 case SAVE_EXPR:
4137 /* If this has not been evaluated and the operand has no side effects,
4138 we can see if we can do something inside it and make a new one.
4139 Note that this test is overly conservative since we can do this
4140 if the only reason it had side effects is that it was another
4141 similar SAVE_EXPR, but that isn't worth bothering with. */
4142 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4143 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4144 wide_type)))
4146 t1 = save_expr (t1);
4147 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4148 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4149 if (is_pending_size (t))
4150 put_pending_size (t1);
4151 return t1;
4153 break;
4155 case LSHIFT_EXPR: case RSHIFT_EXPR:
4156 /* If the second operand is constant, this is a multiplication
4157 or floor division, by a power of two, so we can treat it that
4158 way unless the multiplier or divisor overflows. */
4159 if (TREE_CODE (op1) == INTEGER_CST
4160 /* const_binop may not detect overflow correctly,
4161 so check for it explicitly here. */
4162 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4163 && TREE_INT_CST_HIGH (op1) == 0
4164 && 0 != (t1 = convert (ctype,
4165 const_binop (LSHIFT_EXPR, size_one_node,
4166 op1, 0)))
4167 && ! TREE_OVERFLOW (t1))
4168 return extract_muldiv (build (tcode == LSHIFT_EXPR
4169 ? MULT_EXPR : FLOOR_DIV_EXPR,
4170 ctype, convert (ctype, op0), t1),
4171 c, code, wide_type);
4172 break;
4174 case PLUS_EXPR: case MINUS_EXPR:
4175 /* See if we can eliminate the operation on both sides. If we can, we
4176 can return a new PLUS or MINUS. If we can't, the only remaining
4177 cases where we can do anything are if the second operand is a
4178 constant. */
4179 t1 = extract_muldiv (op0, c, code, wide_type);
4180 t2 = extract_muldiv (op1, c, code, wide_type);
4181 if (t1 != 0 && t2 != 0
4182 && (code == MULT_EXPR
4183 /* If not multiplication, we can only do this if both operands
4184 are divisible by c. */
4185 || (multiple_of_p (ctype, op0, c)
4186 && multiple_of_p (ctype, op1, c))))
4187 return fold (build (tcode, ctype, convert (ctype, t1),
4188 convert (ctype, t2)));
4190 /* If this was a subtraction, negate OP1 and set it to be an addition.
4191 This simplifies the logic below. */
4192 if (tcode == MINUS_EXPR)
4193 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4195 if (TREE_CODE (op1) != INTEGER_CST)
4196 break;
4198 /* If either OP1 or C are negative, this optimization is not safe for
4199 some of the division and remainder types while for others we need
4200 to change the code. */
4201 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4203 if (code == CEIL_DIV_EXPR)
4204 code = FLOOR_DIV_EXPR;
4205 else if (code == FLOOR_DIV_EXPR)
4206 code = CEIL_DIV_EXPR;
4207 else if (code != MULT_EXPR
4208 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4209 break;
4212 /* If it's a multiply or a division/modulus operation of a multiple
4213 of our constant, do the operation and verify it doesn't overflow. */
4214 if (code == MULT_EXPR
4215 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4217 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4218 if (op1 == 0 || TREE_OVERFLOW (op1))
4219 break;
4221 else
4222 break;
4224 /* If we have an unsigned type is not a sizetype, we cannot widen
4225 the operation since it will change the result if the original
4226 computation overflowed. */
4227 if (TREE_UNSIGNED (ctype)
4228 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4229 && ctype != type)
4230 break;
4232 /* If we were able to eliminate our operation from the first side,
4233 apply our operation to the second side and reform the PLUS. */
4234 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4235 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4237 /* The last case is if we are a multiply. In that case, we can
4238 apply the distributive law to commute the multiply and addition
4239 if the multiplication of the constants doesn't overflow. */
4240 if (code == MULT_EXPR)
4241 return fold (build (tcode, ctype, fold (build (code, ctype,
4242 convert (ctype, op0),
4243 convert (ctype, c))),
4244 op1));
4246 break;
4248 case MULT_EXPR:
4249 /* We have a special case here if we are doing something like
4250 (C * 8) % 4 since we know that's zero. */
4251 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4252 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4253 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4254 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4255 return omit_one_operand (type, integer_zero_node, op0);
4257 /* ... fall through ... */
4259 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4260 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4261 /* If we can extract our operation from the LHS, do so and return a
4262 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4263 do something only if the second operand is a constant. */
4264 if (same_p
4265 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4266 return fold (build (tcode, ctype, convert (ctype, t1),
4267 convert (ctype, op1)));
4268 else if (tcode == MULT_EXPR && code == MULT_EXPR
4269 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4270 return fold (build (tcode, ctype, convert (ctype, op0),
4271 convert (ctype, t1)));
4272 else if (TREE_CODE (op1) != INTEGER_CST)
4273 return 0;
4275 /* If these are the same operation types, we can associate them
4276 assuming no overflow. */
4277 if (tcode == code
4278 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4279 convert (ctype, c), 0))
4280 && ! TREE_OVERFLOW (t1))
4281 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4283 /* If these operations "cancel" each other, we have the main
4284 optimizations of this pass, which occur when either constant is a
4285 multiple of the other, in which case we replace this with either an
4286 operation or CODE or TCODE.
4288 If we have an unsigned type that is not a sizetype, we cannot do
4289 this since it will change the result if the original computation
4290 overflowed. */
4291 if ((! TREE_UNSIGNED (ctype)
4292 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4293 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4294 || (tcode == MULT_EXPR
4295 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4296 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4298 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4299 return fold (build (tcode, ctype, convert (ctype, op0),
4300 convert (ctype,
4301 const_binop (TRUNC_DIV_EXPR,
4302 op1, c, 0))));
4303 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4304 return fold (build (code, ctype, convert (ctype, op0),
4305 convert (ctype,
4306 const_binop (TRUNC_DIV_EXPR,
4307 c, op1, 0))));
4309 break;
4311 default:
4312 break;
4315 return 0;
/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
   S, a SAVE_EXPR, return the expression actually being evaluated.  Note
   that we may sometimes modify the tree.  */

static tree
strip_compound_expr (t, s)
     tree t;
     tree s;
{
  enum tree_code code = TREE_CODE (t);

  /* See if this is the COMPOUND_EXPR we want to eliminate: its first
     operand is a CONVERT_EXPR whose operand is exactly S.  If so, the
     real computation is the second operand.  */
  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
    return TREE_OPERAND (t, 1);

  /* See if this is a COND_EXPR or a simple arithmetic operator.  We
     don't bother handling any other types.  Note that operands are
     rewritten in place, so T is mutated, not copied.  */
  else if (code == COND_EXPR)
    {
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
    }
  else if (TREE_CODE_CLASS (code) == '1')
    /* Unary operator: recurse into its single operand.  */
    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
  else if (TREE_CODE_CLASS (code) == '<'
	   || TREE_CODE_CLASS (code) == '2')
    {
      /* Comparison or binary operator: recurse into both operands.  */
      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
    }

  return t;
}
4354 /* Return a node which has the indicated constant VALUE (either 0 or
4355 1), and is of the indicated TYPE. */
4357 static tree
4358 constant_boolean_node (value, type)
4359 int value;
4360 tree type;
4362 if (type == integer_type_node)
4363 return value ? integer_one_node : integer_zero_node;
4364 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4365 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4366 integer_zero_node);
4367 else
4369 tree t = build_int_2 (value, 0);
4371 TREE_TYPE (t) = type;
4372 return t;
4376 /* Utility function for the following routine, to see how complex a nesting of
4377 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4378 we don't care (to avoid spending too much time on complex expressions.). */
4380 static int
4381 count_cond (expr, lim)
4382 tree expr;
4383 int lim;
4385 int ctrue, cfalse;
4387 if (TREE_CODE (expr) != COND_EXPR)
4388 return 0;
4389 else if (lim <= 0)
4390 return 0;
4392 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4393 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4394 return MIN (lim, 1 + ctrue + cfalse);
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
     enum tree_code code;
     tree type;
     tree cond;
     tree arg;
     int cond_first_p;
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;
  /* Nonzero when ARG must be wrapped in a SAVE_EXPR because it is
     evaluated in both arms of the COND_EXPR.  */
  int save = 0;

  /* Point the per-arm operand pointers so that one side of each arm
     aliases ARG and the other side aliases the arm's value, depending
     on whether COND was CODE's first or second operand.  */
  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  if (! cond_first_p)
	    {
	      lhs_code = COMPOUND_EXPR;
	      lhs_type = void_type_node;
	    }
	  else
	    lhs = true_value;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  if (! cond_first_p)
	    {
	      rhs_code = COMPOUND_EXPR;
	      rhs_type = void_type_node;
	    }
	  else
	    rhs = false_value;
	}
    }
  else
    {
      /* COND is a comparison (`x < y'): treat it as a COND_EXPR
	 choosing between 1 and 0 in the comparison's type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate
     it once.  Though this is only required if it is volatile, it
     might be more efficient even if it is not.  However, if we
     succeed in folding one part to a constant, we do not need
     to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this
     SAVE_EXPR interferes with later optimizations, suppressing
     it when we can is important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't
     try to do so.  Don't try to see if the result is a constant
     if an arm is a COND_EXPR since we get exponential behavior
     in that case.  */

  if (TREE_CODE (arg) == SAVE_EXPR)
    save = 1;
  else if (lhs == 0 && rhs == 0
	   && !TREE_CONSTANT (arg)
	   && (*lang_hooks.decls.global_bindings_p) () == 0
	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
	       || TREE_SIDE_EFFECTS (arg)))
    {
      /* Tentatively fold each arm (skipping COND_EXPR arms, see
	 above); if either folds to a constant we can avoid the
	 SAVE_EXPR.  */
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	{
	  /* Neither arm folded to a constant: wrap ARG and rebuild
	     both arms below with the SAVE_EXPR in place.  */
	  arg = save_expr (arg);
	  lhs = rhs = 0;
	  save = 1;
	}
    }

  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  if (save)
    /* Evaluate ARG for its side effects first, then strip the
       COMPOUND_EXPR that merely re-evaluates the SAVE_EXPR.  */
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
4541 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4543 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4544 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4545 ADDEND is the same as X.
4547 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4548 and finite. The problematic cases are when X is zero, and its mode
4549 has signed zeros. In the case of rounding towards -infinity,
4550 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4551 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4553 static bool
4554 fold_real_zero_addition_p (type, addend, negate)
4555 tree type, addend;
4556 int negate;
4558 if (!real_zerop (addend))
4559 return false;
4561 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4562 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4563 return true;
4565 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4566 if (TREE_CODE (addend) == REAL_CST
4567 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4568 negate = !negate;
4570 /* The mode has signed zeros, and we have to honor their sign.
4571 In this situation, there is only one case we can return true for.
4572 X - 0 is the same as X unless rounding towards -infinity is
4573 supported. */
4574 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4578 /* Perform constant folding and related simplification of EXPR.
4579 The related simplifications include x*1 => x, x*0 => 0, etc.,
4580 and application of the associative law.
4581 NOP_EXPR conversions may be removed freely (as long as we
4582 are careful not to change the C type of the overall expression)
4583 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4584 but we can constant-fold them if they have constant operands. */
4586 tree
4587 fold (expr)
4588 tree expr;
4590 tree t = expr;
4591 tree t1 = NULL_TREE;
4592 tree tem;
4593 tree type = TREE_TYPE (expr);
4594 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4595 enum tree_code code = TREE_CODE (t);
4596 int kind = TREE_CODE_CLASS (code);
4597 int invert;
4598 /* WINS will be nonzero when the switch is done
4599 if all operands are constant. */
4600 int wins = 1;
4602 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4603 Likewise for a SAVE_EXPR that's already been evaluated. */
4604 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4605 return t;
4607 /* Return right away if a constant. */
4608 if (kind == 'c')
4609 return t;
4611 #ifdef MAX_INTEGER_COMPUTATION_MODE
4612 check_max_integer_computation_mode (expr);
4613 #endif
4615 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4617 tree subop;
4619 /* Special case for conversion ops that can have fixed point args. */
4620 arg0 = TREE_OPERAND (t, 0);
4622 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4623 if (arg0 != 0)
4624 STRIP_SIGN_NOPS (arg0);
4626 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4627 subop = TREE_REALPART (arg0);
4628 else
4629 subop = arg0;
4631 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4632 && TREE_CODE (subop) != REAL_CST
4634 /* Note that TREE_CONSTANT isn't enough:
4635 static var addresses are constant but we can't
4636 do arithmetic on them. */
4637 wins = 0;
4639 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4641 int len = first_rtl_op (code);
4642 int i;
4643 for (i = 0; i < len; i++)
4645 tree op = TREE_OPERAND (t, i);
4646 tree subop;
4648 if (op == 0)
4649 continue; /* Valid for CALL_EXPR, at least. */
4651 if (kind == '<' || code == RSHIFT_EXPR)
4653 /* Signedness matters here. Perhaps we can refine this
4654 later. */
4655 STRIP_SIGN_NOPS (op);
4657 else
4658 /* Strip any conversions that don't change the mode. */
4659 STRIP_NOPS (op);
4661 if (TREE_CODE (op) == COMPLEX_CST)
4662 subop = TREE_REALPART (op);
4663 else
4664 subop = op;
4666 if (TREE_CODE (subop) != INTEGER_CST
4667 && TREE_CODE (subop) != REAL_CST)
4668 /* Note that TREE_CONSTANT isn't enough:
4669 static var addresses are constant but we can't
4670 do arithmetic on them. */
4671 wins = 0;
4673 if (i == 0)
4674 arg0 = op;
4675 else if (i == 1)
4676 arg1 = op;
4680 /* If this is a commutative operation, and ARG0 is a constant, move it
4681 to ARG1 to reduce the number of tests below. */
4682 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4683 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4684 || code == BIT_AND_EXPR)
4685 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4687 tem = arg0; arg0 = arg1; arg1 = tem;
4689 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4690 TREE_OPERAND (t, 1) = tem;
4693 /* Now WINS is set as described above,
4694 ARG0 is the first operand of EXPR,
4695 and ARG1 is the second operand (if it has more than one operand).
4697 First check for cases where an arithmetic operation is applied to a
4698 compound, conditional, or comparison operation. Push the arithmetic
4699 operation inside the compound or conditional to see if any folding
4700 can then be done. Convert comparison to conditional for this purpose.
4701 The also optimizes non-constant cases that used to be done in
4702 expand_expr.
4704 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4705 one of the operands is a comparison and the other is a comparison, a
4706 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4707 code below would make the expression more complex. Change it to a
4708 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4709 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4711 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4712 || code == EQ_EXPR || code == NE_EXPR)
4713 && ((truth_value_p (TREE_CODE (arg0))
4714 && (truth_value_p (TREE_CODE (arg1))
4715 || (TREE_CODE (arg1) == BIT_AND_EXPR
4716 && integer_onep (TREE_OPERAND (arg1, 1)))))
4717 || (truth_value_p (TREE_CODE (arg1))
4718 && (truth_value_p (TREE_CODE (arg0))
4719 || (TREE_CODE (arg0) == BIT_AND_EXPR
4720 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4722 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4723 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4724 : TRUTH_XOR_EXPR,
4725 type, arg0, arg1));
4727 if (code == EQ_EXPR)
4728 t = invert_truthvalue (t);
4730 return t;
4733 if (TREE_CODE_CLASS (code) == '1')
4735 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4736 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4737 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4738 else if (TREE_CODE (arg0) == COND_EXPR)
4740 tree arg01 = TREE_OPERAND (arg0, 1);
4741 tree arg02 = TREE_OPERAND (arg0, 2);
4742 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
4743 arg01 = fold (build1 (code, type, arg01));
4744 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
4745 arg02 = fold (build1 (code, type, arg02));
4746 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4747 arg01, arg02));
4749 /* If this was a conversion, and all we did was to move into
4750 inside the COND_EXPR, bring it back out. But leave it if
4751 it is a conversion from integer to integer and the
4752 result precision is no wider than a word since such a
4753 conversion is cheap and may be optimized away by combine,
4754 while it couldn't if it were outside the COND_EXPR. Then return
4755 so we don't get into an infinite recursion loop taking the
4756 conversion out and then back in. */
4758 if ((code == NOP_EXPR || code == CONVERT_EXPR
4759 || code == NON_LVALUE_EXPR)
4760 && TREE_CODE (t) == COND_EXPR
4761 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4762 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4763 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
4764 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
4765 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4766 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4767 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4768 && (INTEGRAL_TYPE_P
4769 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4770 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4771 t = build1 (code, type,
4772 build (COND_EXPR,
4773 TREE_TYPE (TREE_OPERAND
4774 (TREE_OPERAND (t, 1), 0)),
4775 TREE_OPERAND (t, 0),
4776 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4777 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4778 return t;
4780 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4781 return fold (build (COND_EXPR, type, arg0,
4782 fold (build1 (code, type, integer_one_node)),
4783 fold (build1 (code, type, integer_zero_node))));
4785 else if (TREE_CODE_CLASS (code) == '2'
4786 || TREE_CODE_CLASS (code) == '<')
4788 if (TREE_CODE (arg1) == COMPOUND_EXPR
4789 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
4790 && ! TREE_SIDE_EFFECTS (arg0))
4791 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4792 fold (build (code, type,
4793 arg0, TREE_OPERAND (arg1, 1))));
4794 else if ((TREE_CODE (arg1) == COND_EXPR
4795 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4796 && TREE_CODE_CLASS (code) != '<'))
4797 && (TREE_CODE (arg0) != COND_EXPR
4798 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4799 && (! TREE_SIDE_EFFECTS (arg0)
4800 || ((*lang_hooks.decls.global_bindings_p) () == 0
4801 && ! contains_placeholder_p (arg0))))
4802 return
4803 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4804 /*cond_first_p=*/0);
4805 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4806 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4807 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4808 else if ((TREE_CODE (arg0) == COND_EXPR
4809 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4810 && TREE_CODE_CLASS (code) != '<'))
4811 && (TREE_CODE (arg1) != COND_EXPR
4812 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4813 && (! TREE_SIDE_EFFECTS (arg1)
4814 || ((*lang_hooks.decls.global_bindings_p) () == 0
4815 && ! contains_placeholder_p (arg1))))
4816 return
4817 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4818 /*cond_first_p=*/1);
4820 else if (TREE_CODE_CLASS (code) == '<'
4821 && TREE_CODE (arg0) == COMPOUND_EXPR)
4822 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4823 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4824 else if (TREE_CODE_CLASS (code) == '<'
4825 && TREE_CODE (arg1) == COMPOUND_EXPR)
4826 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4827 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4829 switch (code)
4831 case INTEGER_CST:
4832 case REAL_CST:
4833 case VECTOR_CST:
4834 case STRING_CST:
4835 case COMPLEX_CST:
4836 case CONSTRUCTOR:
4837 return t;
4839 case CONST_DECL:
4840 return fold (DECL_INITIAL (t));
4842 case NOP_EXPR:
4843 case FLOAT_EXPR:
4844 case CONVERT_EXPR:
4845 case FIX_TRUNC_EXPR:
4846 /* Other kinds of FIX are not handled properly by fold_convert. */
4848 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4849 return TREE_OPERAND (t, 0);
4851 /* Handle cases of two conversions in a row. */
4852 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4853 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4855 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4856 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4857 tree final_type = TREE_TYPE (t);
4858 int inside_int = INTEGRAL_TYPE_P (inside_type);
4859 int inside_ptr = POINTER_TYPE_P (inside_type);
4860 int inside_float = FLOAT_TYPE_P (inside_type);
4861 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4862 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4863 int inter_int = INTEGRAL_TYPE_P (inter_type);
4864 int inter_ptr = POINTER_TYPE_P (inter_type);
4865 int inter_float = FLOAT_TYPE_P (inter_type);
4866 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4867 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4868 int final_int = INTEGRAL_TYPE_P (final_type);
4869 int final_ptr = POINTER_TYPE_P (final_type);
4870 int final_float = FLOAT_TYPE_P (final_type);
4871 unsigned int final_prec = TYPE_PRECISION (final_type);
4872 int final_unsignedp = TREE_UNSIGNED (final_type);
4874 /* In addition to the cases of two conversions in a row
4875 handled below, if we are converting something to its own
4876 type via an object of identical or wider precision, neither
4877 conversion is needed. */
4878 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4879 && ((inter_int && final_int) || (inter_float && final_float))
4880 && inter_prec >= final_prec)
4881 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4883 /* Likewise, if the intermediate and final types are either both
4884 float or both integer, we don't need the middle conversion if
4885 it is wider than the final type and doesn't change the signedness
4886 (for integers). Avoid this if the final type is a pointer
4887 since then we sometimes need the inner conversion. Likewise if
4888 the outer has a precision not equal to the size of its mode. */
4889 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4890 || (inter_float && inside_float))
4891 && inter_prec >= inside_prec
4892 && (inter_float || inter_unsignedp == inside_unsignedp)
4893 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4894 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4895 && ! final_ptr)
4896 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4898 /* If we have a sign-extension of a zero-extended value, we can
4899 replace that by a single zero-extension. */
4900 if (inside_int && inter_int && final_int
4901 && inside_prec < inter_prec && inter_prec < final_prec
4902 && inside_unsignedp && !inter_unsignedp)
4903 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4905 /* Two conversions in a row are not needed unless:
4906 - some conversion is floating-point (overstrict for now), or
4907 - the intermediate type is narrower than both initial and
4908 final, or
4909 - the intermediate type and innermost type differ in signedness,
4910 and the outermost type is wider than the intermediate, or
4911 - the initial type is a pointer type and the precisions of the
4912 intermediate and final types differ, or
4913 - the final type is a pointer type and the precisions of the
4914 initial and intermediate types differ. */
4915 if (! inside_float && ! inter_float && ! final_float
4916 && (inter_prec > inside_prec || inter_prec > final_prec)
4917 && ! (inside_int && inter_int
4918 && inter_unsignedp != inside_unsignedp
4919 && inter_prec < final_prec)
4920 && ((inter_unsignedp && inter_prec > inside_prec)
4921 == (final_unsignedp && final_prec > inter_prec))
4922 && ! (inside_ptr && inter_prec != final_prec)
4923 && ! (final_ptr && inside_prec != inter_prec)
4924 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4925 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4926 && ! final_ptr)
4927 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4930 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4931 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4932 /* Detect assigning a bitfield. */
4933 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4934 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4936 /* Don't leave an assignment inside a conversion
4937 unless assigning a bitfield. */
4938 tree prev = TREE_OPERAND (t, 0);
4939 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4940 /* First do the assignment, then return converted constant. */
4941 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4942 TREE_USED (t) = 1;
4943 return t;
4946 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4947 constants (if x has signed type, the sign bit cannot be set
4948 in c). This folds extension into the BIT_AND_EXPR. */
4949 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4950 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4951 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4952 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4954 tree and = TREE_OPERAND (t, 0);
4955 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4956 int change = 0;
4958 if (TREE_UNSIGNED (TREE_TYPE (and))
4959 || (TYPE_PRECISION (TREE_TYPE (t))
4960 <= TYPE_PRECISION (TREE_TYPE (and))))
4961 change = 1;
4962 else if (TYPE_PRECISION (TREE_TYPE (and1))
4963 <= HOST_BITS_PER_WIDE_INT
4964 && host_integerp (and1, 1))
4966 unsigned HOST_WIDE_INT cst;
4968 cst = tree_low_cst (and1, 1);
4969 cst &= (HOST_WIDE_INT) -1
4970 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4971 change = (cst == 0);
4972 #ifdef LOAD_EXTEND_OP
4973 if (change
4974 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4975 == ZERO_EXTEND))
4977 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4978 and0 = convert (uns, and0);
4979 and1 = convert (uns, and1);
4981 #endif
4983 if (change)
4984 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4985 convert (TREE_TYPE (t), and0),
4986 convert (TREE_TYPE (t), and1)));
4989 if (!wins)
4991 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4992 return t;
4994 return fold_convert (t, arg0);
4996 case VIEW_CONVERT_EXPR:
4997 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4998 return build1 (VIEW_CONVERT_EXPR, type,
4999 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5000 return t;
5002 case COMPONENT_REF:
5003 if (TREE_CODE (arg0) == CONSTRUCTOR)
5005 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5006 if (m)
5007 t = TREE_VALUE (m);
5009 return t;
5011 case RANGE_EXPR:
5012 TREE_CONSTANT (t) = wins;
5013 return t;
5015 case NEGATE_EXPR:
5016 if (wins)
5018 if (TREE_CODE (arg0) == INTEGER_CST)
5020 unsigned HOST_WIDE_INT low;
5021 HOST_WIDE_INT high;
5022 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5023 TREE_INT_CST_HIGH (arg0),
5024 &low, &high);
5025 t = build_int_2 (low, high);
5026 TREE_TYPE (t) = type;
5027 TREE_OVERFLOW (t)
5028 = (TREE_OVERFLOW (arg0)
5029 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5030 TREE_CONSTANT_OVERFLOW (t)
5031 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5033 else if (TREE_CODE (arg0) == REAL_CST)
5034 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5036 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5037 return TREE_OPERAND (arg0, 0);
5038 /* Convert -((double)float) into (double)(-float). */
5039 else if (TREE_CODE (arg0) == NOP_EXPR
5040 && TREE_CODE (type) == REAL_TYPE)
5042 tree targ0 = strip_float_extensions (arg0);
5043 if (targ0 != arg0)
5044 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5048 /* Convert - (a - b) to (b - a) for non-floating-point. */
5049 else if (TREE_CODE (arg0) == MINUS_EXPR
5050 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5051 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5052 TREE_OPERAND (arg0, 0));
5054 return t;
5056 case ABS_EXPR:
5057 if (wins)
5059 if (TREE_CODE (arg0) == INTEGER_CST)
5061 /* If the value is unsigned, then the absolute value is
5062 the same as the ordinary value. */
5063 if (TREE_UNSIGNED (type))
5064 return arg0;
5065 /* Similarly, if the value is non-negative. */
5066 else if (INT_CST_LT (integer_minus_one_node, arg0))
5067 return arg0;
5068 /* If the value is negative, then the absolute value is
5069 its negation. */
5070 else
5072 unsigned HOST_WIDE_INT low;
5073 HOST_WIDE_INT high;
5074 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5075 TREE_INT_CST_HIGH (arg0),
5076 &low, &high);
5077 t = build_int_2 (low, high);
5078 TREE_TYPE (t) = type;
5079 TREE_OVERFLOW (t)
5080 = (TREE_OVERFLOW (arg0)
5081 | force_fit_type (t, overflow));
5082 TREE_CONSTANT_OVERFLOW (t)
5083 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5086 else if (TREE_CODE (arg0) == REAL_CST)
5088 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5089 t = build_real (type,
5090 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5093 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5094 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5095 /* Convert fabs((double)float) into (double)fabsf(float). */
5096 else if (TREE_CODE (arg0) == NOP_EXPR
5097 && TREE_CODE (type) == REAL_TYPE)
5099 tree targ0 = strip_float_extensions (arg0);
5100 if (targ0 != arg0)
5101 return convert (type, build1 (ABS_EXPR, TREE_TYPE (targ0), targ0));
5104 else
5106 /* fabs(sqrt(x)) = sqrt(x) and fabs(exp(x)) = exp(x). */
5107 enum built_in_function fcode = builtin_mathfn_code (arg0);
5108 if (fcode == BUILT_IN_SQRT
5109 || fcode == BUILT_IN_SQRTF
5110 || fcode == BUILT_IN_SQRTL
5111 || fcode == BUILT_IN_EXP
5112 || fcode == BUILT_IN_EXPF
5113 || fcode == BUILT_IN_EXPL)
5114 t = arg0;
5116 return t;
5118 case CONJ_EXPR:
5119 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5120 return convert (type, arg0);
5121 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5122 return build (COMPLEX_EXPR, type,
5123 TREE_OPERAND (arg0, 0),
5124 negate_expr (TREE_OPERAND (arg0, 1)));
5125 else if (TREE_CODE (arg0) == COMPLEX_CST)
5126 return build_complex (type, TREE_REALPART (arg0),
5127 negate_expr (TREE_IMAGPART (arg0)));
5128 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5129 return fold (build (TREE_CODE (arg0), type,
5130 fold (build1 (CONJ_EXPR, type,
5131 TREE_OPERAND (arg0, 0))),
5132 fold (build1 (CONJ_EXPR,
5133 type, TREE_OPERAND (arg0, 1)))));
5134 else if (TREE_CODE (arg0) == CONJ_EXPR)
5135 return TREE_OPERAND (arg0, 0);
5136 return t;
5138 case BIT_NOT_EXPR:
5139 if (wins)
5141 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5142 ~ TREE_INT_CST_HIGH (arg0));
5143 TREE_TYPE (t) = type;
5144 force_fit_type (t, 0);
5145 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5146 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5148 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5149 return TREE_OPERAND (arg0, 0);
5150 return t;
5152 case PLUS_EXPR:
5153 /* A + (-B) -> A - B */
5154 if (TREE_CODE (arg1) == NEGATE_EXPR)
5155 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5156 /* (-A) + B -> B - A */
5157 if (TREE_CODE (arg0) == NEGATE_EXPR)
5158 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5159 else if (! FLOAT_TYPE_P (type))
5161 if (integer_zerop (arg1))
5162 return non_lvalue (convert (type, arg0));
5164 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5165 with a constant, and the two constants have no bits in common,
5166 we should treat this as a BIT_IOR_EXPR since this may produce more
5167 simplifications. */
5168 if (TREE_CODE (arg0) == BIT_AND_EXPR
5169 && TREE_CODE (arg1) == BIT_AND_EXPR
5170 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5171 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5172 && integer_zerop (const_binop (BIT_AND_EXPR,
5173 TREE_OPERAND (arg0, 1),
5174 TREE_OPERAND (arg1, 1), 0)))
5176 code = BIT_IOR_EXPR;
5177 goto bit_ior;
5180 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5181 (plus (plus (mult) (mult)) (foo)) so that we can
5182 take advantage of the factoring cases below. */
5183 if ((TREE_CODE (arg0) == PLUS_EXPR
5184 && TREE_CODE (arg1) == MULT_EXPR)
5185 || (TREE_CODE (arg1) == PLUS_EXPR
5186 && TREE_CODE (arg0) == MULT_EXPR))
5188 tree parg0, parg1, parg, marg;
5190 if (TREE_CODE (arg0) == PLUS_EXPR)
5191 parg = arg0, marg = arg1;
5192 else
5193 parg = arg1, marg = arg0;
5194 parg0 = TREE_OPERAND (parg, 0);
5195 parg1 = TREE_OPERAND (parg, 1);
5196 STRIP_NOPS (parg0);
5197 STRIP_NOPS (parg1);
5199 if (TREE_CODE (parg0) == MULT_EXPR
5200 && TREE_CODE (parg1) != MULT_EXPR)
5201 return fold (build (PLUS_EXPR, type,
5202 fold (build (PLUS_EXPR, type, parg0, marg)),
5203 parg1));
5204 if (TREE_CODE (parg0) != MULT_EXPR
5205 && TREE_CODE (parg1) == MULT_EXPR)
5206 return fold (build (PLUS_EXPR, type,
5207 fold (build (PLUS_EXPR, type, parg1, marg)),
5208 parg0));
5211 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5213 tree arg00, arg01, arg10, arg11;
5214 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5216 /* (A * C) + (B * C) -> (A+B) * C.
5217 We are most concerned about the case where C is a constant,
5218 but other combinations show up during loop reduction. Since
5219 it is not difficult, try all four possibilities. */
5221 arg00 = TREE_OPERAND (arg0, 0);
5222 arg01 = TREE_OPERAND (arg0, 1);
5223 arg10 = TREE_OPERAND (arg1, 0);
5224 arg11 = TREE_OPERAND (arg1, 1);
5225 same = NULL_TREE;
5227 if (operand_equal_p (arg01, arg11, 0))
5228 same = arg01, alt0 = arg00, alt1 = arg10;
5229 else if (operand_equal_p (arg00, arg10, 0))
5230 same = arg00, alt0 = arg01, alt1 = arg11;
5231 else if (operand_equal_p (arg00, arg11, 0))
5232 same = arg00, alt0 = arg01, alt1 = arg10;
5233 else if (operand_equal_p (arg01, arg10, 0))
5234 same = arg01, alt0 = arg00, alt1 = arg11;
5236 /* No identical multiplicands; see if we can find a common
5237 power-of-two factor in non-power-of-two multiplies. This
5238 can help in multi-dimensional array access. */
5239 else if (TREE_CODE (arg01) == INTEGER_CST
5240 && TREE_CODE (arg11) == INTEGER_CST
5241 && TREE_INT_CST_HIGH (arg01) == 0
5242 && TREE_INT_CST_HIGH (arg11) == 0)
5244 HOST_WIDE_INT int01, int11, tmp;
5245 int01 = TREE_INT_CST_LOW (arg01);
5246 int11 = TREE_INT_CST_LOW (arg11);
5248 /* Move min of absolute values to int11. */
5249 if ((int01 >= 0 ? int01 : -int01)
5250 < (int11 >= 0 ? int11 : -int11))
5252 tmp = int01, int01 = int11, int11 = tmp;
5253 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5254 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5257 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5259 alt0 = fold (build (MULT_EXPR, type, arg00,
5260 build_int_2 (int01 / int11, 0)));
5261 alt1 = arg10;
5262 same = arg11;
5266 if (same)
5267 return fold (build (MULT_EXPR, type,
5268 fold (build (PLUS_EXPR, type, alt0, alt1)),
5269 same));
5273 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5274 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5275 return non_lvalue (convert (type, arg0));
5277 /* Likewise if the operands are reversed. */
5278 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5279 return non_lvalue (convert (type, arg1));
5281 bit_rotate:
5282 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5283 is a rotate of A by C1 bits. */
5284 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5285 is a rotate of A by B bits. */
5287 enum tree_code code0, code1;
5288 code0 = TREE_CODE (arg0);
5289 code1 = TREE_CODE (arg1);
5290 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5291 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5292 && operand_equal_p (TREE_OPERAND (arg0, 0),
5293 TREE_OPERAND (arg1, 0), 0)
5294 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5296 tree tree01, tree11;
5297 enum tree_code code01, code11;
5299 tree01 = TREE_OPERAND (arg0, 1);
5300 tree11 = TREE_OPERAND (arg1, 1);
5301 STRIP_NOPS (tree01);
5302 STRIP_NOPS (tree11);
5303 code01 = TREE_CODE (tree01);
5304 code11 = TREE_CODE (tree11);
5305 if (code01 == INTEGER_CST
5306 && code11 == INTEGER_CST
5307 && TREE_INT_CST_HIGH (tree01) == 0
5308 && TREE_INT_CST_HIGH (tree11) == 0
5309 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5310 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5311 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5312 code0 == LSHIFT_EXPR ? tree01 : tree11);
5313 else if (code11 == MINUS_EXPR)
5315 tree tree110, tree111;
5316 tree110 = TREE_OPERAND (tree11, 0);
5317 tree111 = TREE_OPERAND (tree11, 1);
5318 STRIP_NOPS (tree110);
5319 STRIP_NOPS (tree111);
5320 if (TREE_CODE (tree110) == INTEGER_CST
5321 && 0 == compare_tree_int (tree110,
5322 TYPE_PRECISION
5323 (TREE_TYPE (TREE_OPERAND
5324 (arg0, 0))))
5325 && operand_equal_p (tree01, tree111, 0))
5326 return build ((code0 == LSHIFT_EXPR
5327 ? LROTATE_EXPR
5328 : RROTATE_EXPR),
5329 type, TREE_OPERAND (arg0, 0), tree01);
5331 else if (code01 == MINUS_EXPR)
5333 tree tree010, tree011;
5334 tree010 = TREE_OPERAND (tree01, 0);
5335 tree011 = TREE_OPERAND (tree01, 1);
5336 STRIP_NOPS (tree010);
5337 STRIP_NOPS (tree011);
5338 if (TREE_CODE (tree010) == INTEGER_CST
5339 && 0 == compare_tree_int (tree010,
5340 TYPE_PRECISION
5341 (TREE_TYPE (TREE_OPERAND
5342 (arg0, 0))))
5343 && operand_equal_p (tree11, tree011, 0))
5344 return build ((code0 != LSHIFT_EXPR
5345 ? LROTATE_EXPR
5346 : RROTATE_EXPR),
5347 type, TREE_OPERAND (arg0, 0), tree11);
5352 associate:
5353 /* In most languages, can't associate operations on floats through
5354 parentheses. Rather than remember where the parentheses were, we
5355 don't associate floats at all. It shouldn't matter much. However,
5356 associating multiplications is only very slightly inaccurate, so do
5357 that if -funsafe-math-optimizations is specified. */
5359 if (! wins
5360 && (! FLOAT_TYPE_P (type)
5361 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5363 tree var0, con0, lit0, minus_lit0;
5364 tree var1, con1, lit1, minus_lit1;
5366 /* Split both trees into variables, constants, and literals. Then
5367 associate each group together, the constants with literals,
5368 then the result with variables. This increases the chances of
5369 literals being recombined later and of generating relocatable
5370 expressions for the sum of a constant and literal. */
5371 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5372 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5373 code == MINUS_EXPR);
5375 /* Only do something if we found more than two objects. Otherwise,
5376 nothing has changed and we risk infinite recursion. */
5377 if (2 < ((var0 != 0) + (var1 != 0)
5378 + (con0 != 0) + (con1 != 0)
5379 + (lit0 != 0) + (lit1 != 0)
5380 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5382 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5383 if (code == MINUS_EXPR)
5384 code = PLUS_EXPR;
5386 var0 = associate_trees (var0, var1, code, type);
5387 con0 = associate_trees (con0, con1, code, type);
5388 lit0 = associate_trees (lit0, lit1, code, type);
5389 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5391 /* Preserve the MINUS_EXPR if the negative part of the literal is
5392 greater than the positive part. Otherwise, the multiplicative
5393 folding code (i.e extract_muldiv) may be fooled in case
5394 unsigned constants are substracted, like in the following
5395 example: ((X*2 + 4) - 8U)/2. */
5396 if (minus_lit0 && lit0)
5398 if (tree_int_cst_lt (lit0, minus_lit0))
5400 minus_lit0 = associate_trees (minus_lit0, lit0,
5401 MINUS_EXPR, type);
5402 lit0 = 0;
5404 else
5406 lit0 = associate_trees (lit0, minus_lit0,
5407 MINUS_EXPR, type);
5408 minus_lit0 = 0;
5411 if (minus_lit0)
5413 if (con0 == 0)
5414 return convert (type, associate_trees (var0, minus_lit0,
5415 MINUS_EXPR, type));
5416 else
5418 con0 = associate_trees (con0, minus_lit0,
5419 MINUS_EXPR, type);
5420 return convert (type, associate_trees (var0, con0,
5421 PLUS_EXPR, type));
5425 con0 = associate_trees (con0, lit0, code, type);
5426 return convert (type, associate_trees (var0, con0, code, type));
5430 binary:
5431 if (wins)
5432 t1 = const_binop (code, arg0, arg1, 0);
5433 if (t1 != NULL_TREE)
5435 /* The return value should always have
5436 the same type as the original expression. */
5437 if (TREE_TYPE (t1) != TREE_TYPE (t))
5438 t1 = convert (TREE_TYPE (t), t1);
5440 return t1;
5442 return t;
5444 case MINUS_EXPR:
5445 /* A - (-B) -> A + B */
5446 if (TREE_CODE (arg1) == NEGATE_EXPR)
5447 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5448 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5449 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5450 return
5451 fold (build (MINUS_EXPR, type,
5452 build_real (TREE_TYPE (arg1),
5453 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5454 TREE_OPERAND (arg0, 0)));
5456 if (! FLOAT_TYPE_P (type))
5458 if (! wins && integer_zerop (arg0))
5459 return negate_expr (convert (type, arg1));
5460 if (integer_zerop (arg1))
5461 return non_lvalue (convert (type, arg0));
5463 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5464 about the case where C is a constant, just try one of the
5465 four possibilities. */
5467 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5468 && operand_equal_p (TREE_OPERAND (arg0, 1),
5469 TREE_OPERAND (arg1, 1), 0))
5470 return fold (build (MULT_EXPR, type,
5471 fold (build (MINUS_EXPR, type,
5472 TREE_OPERAND (arg0, 0),
5473 TREE_OPERAND (arg1, 0))),
5474 TREE_OPERAND (arg0, 1)));
5477 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5478 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5479 return non_lvalue (convert (type, arg0));
5481 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5482 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5483 (-ARG1 + ARG0) reduces to -ARG1. */
5484 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5485 return negate_expr (convert (type, arg1));
5487 /* Fold &x - &x. This can happen from &x.foo - &x.
5488 This is unsafe for certain floats even in non-IEEE formats.
5489 In IEEE, it is unsafe because it does wrong for NaNs.
5490 Also note that operand_equal_p is always false if an operand
5491 is volatile. */
5493 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5494 && operand_equal_p (arg0, arg1, 0))
5495 return convert (type, integer_zero_node);
5497 goto associate;
5499 case MULT_EXPR:
5500 /* (-A) * (-B) -> A * B */
5501 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5502 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5503 TREE_OPERAND (arg1, 0)));
5505 if (! FLOAT_TYPE_P (type))
5507 if (integer_zerop (arg1))
5508 return omit_one_operand (type, arg1, arg0);
5509 if (integer_onep (arg1))
5510 return non_lvalue (convert (type, arg0));
5512 /* (a * (1 << b)) is (a << b) */
5513 if (TREE_CODE (arg1) == LSHIFT_EXPR
5514 && integer_onep (TREE_OPERAND (arg1, 0)))
5515 return fold (build (LSHIFT_EXPR, type, arg0,
5516 TREE_OPERAND (arg1, 1)));
5517 if (TREE_CODE (arg0) == LSHIFT_EXPR
5518 && integer_onep (TREE_OPERAND (arg0, 0)))
5519 return fold (build (LSHIFT_EXPR, type, arg1,
5520 TREE_OPERAND (arg0, 1)));
5522 if (TREE_CODE (arg1) == INTEGER_CST
5523 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5524 code, NULL_TREE)))
5525 return convert (type, tem);
5528 else
5530 /* Maybe fold x * 0 to 0. The expressions aren't the same
5531 when x is NaN, since x * 0 is also NaN. Nor are they the
5532 same in modes with signed zeros, since multiplying a
5533 negative value by 0 gives -0, not +0. */
5534 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5535 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5536 && real_zerop (arg1))
5537 return omit_one_operand (type, arg1, arg0);
5538 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5539 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5540 && real_onep (arg1))
5541 return non_lvalue (convert (type, arg0));
5543 /* Transform x * -1.0 into -x. */
5544 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5545 && real_minus_onep (arg1))
5546 return fold (build1 (NEGATE_EXPR, type, arg0));
5548 /* x*2 is x+x */
5549 if (! wins && real_twop (arg1)
5550 && (*lang_hooks.decls.global_bindings_p) () == 0
5551 && ! contains_placeholder_p (arg0))
5553 tree arg = save_expr (arg0);
5554 return build (PLUS_EXPR, type, arg, arg);
5557 if (flag_unsafe_math_optimizations)
5559 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5560 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5562 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
5563 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5564 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5565 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5567 tree sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5568 tree arg = build (MULT_EXPR, type,
5569 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5570 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5571 tree arglist = build_tree_list (NULL_TREE, arg);
5572 return fold (build_function_call_expr (sqrtfn, arglist));
5575 /* Optimize exp(x)*exp(y) as exp(x+y). */
5576 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5577 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5578 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5580 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5581 tree arg = build (PLUS_EXPR, type,
5582 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5583 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5584 tree arglist = build_tree_list (NULL_TREE, arg);
5585 return fold (build_function_call_expr (expfn, arglist));
5589 goto associate;
5591 case BIT_IOR_EXPR:
5592 bit_ior:
5593 if (integer_all_onesp (arg1))
5594 return omit_one_operand (type, arg1, arg0);
5595 if (integer_zerop (arg1))
5596 return non_lvalue (convert (type, arg0));
5597 t1 = distribute_bit_expr (code, type, arg0, arg1);
5598 if (t1 != NULL_TREE)
5599 return t1;
5601 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5603 This results in more efficient code for machines without a NAND
5604 instruction. Combine will canonicalize to the first form
5605 which will allow use of NAND instructions provided by the
5606 backend if they exist. */
5607 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5608 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5610 return fold (build1 (BIT_NOT_EXPR, type,
5611 build (BIT_AND_EXPR, type,
5612 TREE_OPERAND (arg0, 0),
5613 TREE_OPERAND (arg1, 0))));
5616 /* See if this can be simplified into a rotate first. If that
5617 is unsuccessful continue in the association code. */
5618 goto bit_rotate;
5620 case BIT_XOR_EXPR:
5621 if (integer_zerop (arg1))
5622 return non_lvalue (convert (type, arg0));
5623 if (integer_all_onesp (arg1))
5624 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5626 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5627 with a constant, and the two constants have no bits in common,
5628 we should treat this as a BIT_IOR_EXPR since this may produce more
5629 simplifications. */
5630 if (TREE_CODE (arg0) == BIT_AND_EXPR
5631 && TREE_CODE (arg1) == BIT_AND_EXPR
5632 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5633 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5634 && integer_zerop (const_binop (BIT_AND_EXPR,
5635 TREE_OPERAND (arg0, 1),
5636 TREE_OPERAND (arg1, 1), 0)))
5638 code = BIT_IOR_EXPR;
5639 goto bit_ior;
5642 /* See if this can be simplified into a rotate first. If that
5643 is unsuccessful continue in the association code. */
5644 goto bit_rotate;
5646 case BIT_AND_EXPR:
5647 bit_and:
5648 if (integer_all_onesp (arg1))
5649 return non_lvalue (convert (type, arg0));
5650 if (integer_zerop (arg1))
5651 return omit_one_operand (type, arg1, arg0);
5652 t1 = distribute_bit_expr (code, type, arg0, arg1);
5653 if (t1 != NULL_TREE)
5654 return t1;
5655 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5656 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5657 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5659 unsigned int prec
5660 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5662 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5663 && (~TREE_INT_CST_LOW (arg1)
5664 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5665 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5668 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5670 This results in more efficient code for machines without a NOR
5671 instruction. Combine will canonicalize to the first form
5672 which will allow use of NOR instructions provided by the
5673 backend if they exist. */
5674 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5675 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5677 return fold (build1 (BIT_NOT_EXPR, type,
5678 build (BIT_IOR_EXPR, type,
5679 TREE_OPERAND (arg0, 0),
5680 TREE_OPERAND (arg1, 0))));
5683 goto associate;
5685 case BIT_ANDTC_EXPR:
5686 if (integer_all_onesp (arg0))
5687 return non_lvalue (convert (type, arg1));
5688 if (integer_zerop (arg0))
5689 return omit_one_operand (type, arg0, arg1);
5690 if (TREE_CODE (arg1) == INTEGER_CST)
5692 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5693 code = BIT_AND_EXPR;
5694 goto bit_and;
5696 goto binary;
5698 case RDIV_EXPR:
5699 /* Don't touch a floating-point divide by zero unless the mode
5700 of the constant can represent infinity. */
5701 if (TREE_CODE (arg1) == REAL_CST
5702 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5703 && real_zerop (arg1))
5704 return t;
5706 /* (-A) / (-B) -> A / B */
5707 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5708 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5709 TREE_OPERAND (arg1, 0)));
5711 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
5712 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5713 && real_onep (arg1))
5714 return non_lvalue (convert (type, arg0));
5716 /* If ARG1 is a constant, we can convert this to a multiply by the
5717 reciprocal. This does not have the same rounding properties,
5718 so only do this if -funsafe-math-optimizations. We can actually
5719 always safely do it if ARG1 is a power of two, but it's hard to
5720 tell if it is or not in a portable manner. */
5721 if (TREE_CODE (arg1) == REAL_CST)
5723 if (flag_unsafe_math_optimizations
5724 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5725 arg1, 0)))
5726 return fold (build (MULT_EXPR, type, arg0, tem));
5727 /* Find the reciprocal if optimizing and the result is exact. */
5728 else if (optimize)
5730 REAL_VALUE_TYPE r;
5731 r = TREE_REAL_CST (arg1);
5732 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5734 tem = build_real (type, r);
5735 return fold (build (MULT_EXPR, type, arg0, tem));
5739 /* Convert A/B/C to A/(B*C). */
5740 if (flag_unsafe_math_optimizations
5741 && TREE_CODE (arg0) == RDIV_EXPR)
5743 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5744 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5745 arg1)));
5747 /* Convert A/(B/C) to (A/B)*C. */
5748 if (flag_unsafe_math_optimizations
5749 && TREE_CODE (arg1) == RDIV_EXPR)
5751 return fold (build (MULT_EXPR, type,
5752 build (RDIV_EXPR, type, arg0,
5753 TREE_OPERAND (arg1, 0)),
5754 TREE_OPERAND (arg1, 1)));
5757 /* Optimize x/exp(y) into x*exp(-y). */
5758 if (flag_unsafe_math_optimizations)
5760 enum built_in_function fcode = builtin_mathfn_code (arg1);
5761 if (fcode == BUILT_IN_EXP
5762 || fcode == BUILT_IN_EXPF
5763 || fcode == BUILT_IN_EXPL)
5765 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
5766 tree arg = build1 (NEGATE_EXPR, type,
5767 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5768 tree arglist = build_tree_list (NULL_TREE, arg);
5769 arg1 = build_function_call_expr (expfn, arglist);
5770 return fold (build (MULT_EXPR, type, arg0, arg1));
5773 goto binary;
5775 case TRUNC_DIV_EXPR:
5776 case ROUND_DIV_EXPR:
5777 case FLOOR_DIV_EXPR:
5778 case CEIL_DIV_EXPR:
5779 case EXACT_DIV_EXPR:
5780 if (integer_onep (arg1))
5781 return non_lvalue (convert (type, arg0));
5782 if (integer_zerop (arg1))
5783 return t;
5785 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5786 operation, EXACT_DIV_EXPR.
5788 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5789 At one time others generated faster code, it's not clear if they do
5790 after the last round to changes to the DIV code in expmed.c. */
5791 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5792 && multiple_of_p (type, arg0, arg1))
5793 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5795 if (TREE_CODE (arg1) == INTEGER_CST
5796 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5797 code, NULL_TREE)))
5798 return convert (type, tem);
5800 goto binary;
5802 case CEIL_MOD_EXPR:
5803 case FLOOR_MOD_EXPR:
5804 case ROUND_MOD_EXPR:
5805 case TRUNC_MOD_EXPR:
5806 if (integer_onep (arg1))
5807 return omit_one_operand (type, integer_zero_node, arg0);
5808 if (integer_zerop (arg1))
5809 return t;
5811 if (TREE_CODE (arg1) == INTEGER_CST
5812 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5813 code, NULL_TREE)))
5814 return convert (type, tem);
5816 goto binary;
5818 case LROTATE_EXPR:
5819 case RROTATE_EXPR:
5820 if (integer_all_onesp (arg0))
5821 return omit_one_operand (type, arg0, arg1);
5822 goto shift;
5824 case RSHIFT_EXPR:
5825 /* Optimize -1 >> x for arithmetic right shifts. */
5826 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
5827 return omit_one_operand (type, arg0, arg1);
5828 /* ... fall through ... */
5830 case LSHIFT_EXPR:
5831 shift:
5832 if (integer_zerop (arg1))
5833 return non_lvalue (convert (type, arg0));
5834 if (integer_zerop (arg0))
5835 return omit_one_operand (type, arg0, arg1);
5837 /* Since negative shift count is not well-defined,
5838 don't try to compute it in the compiler. */
5839 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5840 return t;
5841 /* Rewrite an LROTATE_EXPR by a constant into an
5842 RROTATE_EXPR by a new constant. */
5843 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5845 TREE_SET_CODE (t, RROTATE_EXPR);
5846 code = RROTATE_EXPR;
5847 TREE_OPERAND (t, 1) = arg1
5848 = const_binop
5849 (MINUS_EXPR,
5850 convert (TREE_TYPE (arg1),
5851 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5852 arg1, 0);
5853 if (tree_int_cst_sgn (arg1) < 0)
5854 return t;
5857 /* If we have a rotate of a bit operation with the rotate count and
5858 the second operand of the bit operation both constant,
5859 permute the two operations. */
5860 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5861 && (TREE_CODE (arg0) == BIT_AND_EXPR
5862 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5863 || TREE_CODE (arg0) == BIT_IOR_EXPR
5864 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5866 return fold (build (TREE_CODE (arg0), type,
5867 fold (build (code, type,
5868 TREE_OPERAND (arg0, 0), arg1)),
5869 fold (build (code, type,
5870 TREE_OPERAND (arg0, 1), arg1))));
5872 /* Two consecutive rotates adding up to the width of the mode can
5873 be ignored. */
5874 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5875 && TREE_CODE (arg0) == RROTATE_EXPR
5876 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5877 && TREE_INT_CST_HIGH (arg1) == 0
5878 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5879 && ((TREE_INT_CST_LOW (arg1)
5880 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5881 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5882 return TREE_OPERAND (arg0, 0);
5884 goto binary;
5886 case MIN_EXPR:
5887 if (operand_equal_p (arg0, arg1, 0))
5888 return omit_one_operand (type, arg0, arg1);
5889 if (INTEGRAL_TYPE_P (type)
5890 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5891 return omit_one_operand (type, arg1, arg0);
5892 goto associate;
5894 case MAX_EXPR:
5895 if (operand_equal_p (arg0, arg1, 0))
5896 return omit_one_operand (type, arg0, arg1);
5897 if (INTEGRAL_TYPE_P (type)
5898 && TYPE_MAX_VALUE (type)
5899 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5900 return omit_one_operand (type, arg1, arg0);
5901 goto associate;
5903 case TRUTH_NOT_EXPR:
5904 /* Note that the operand of this must be an int
5905 and its values must be 0 or 1.
5906 ("true" is a fixed value perhaps depending on the language,
5907 but we don't handle values other than 1 correctly yet.) */
5908 tem = invert_truthvalue (arg0);
5909 /* Avoid infinite recursion. */
5910 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5911 return t;
5912 return convert (type, tem);
5914 case TRUTH_ANDIF_EXPR:
5915 /* Note that the operands of this must be ints
5916 and their values must be 0 or 1.
5917 ("true" is a fixed value perhaps depending on the language.) */
5918 /* If first arg is constant zero, return it. */
5919 if (integer_zerop (arg0))
5920 return convert (type, arg0);
5921 case TRUTH_AND_EXPR:
5922 /* If either arg is constant true, drop it. */
5923 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5924 return non_lvalue (convert (type, arg1));
5925 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5926 /* Preserve sequence points. */
5927 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5928 return non_lvalue (convert (type, arg0));
5929 /* If second arg is constant zero, result is zero, but first arg
5930 must be evaluated. */
5931 if (integer_zerop (arg1))
5932 return omit_one_operand (type, arg1, arg0);
5933 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5934 case will be handled here. */
5935 if (integer_zerop (arg0))
5936 return omit_one_operand (type, arg0, arg1);
5938 truth_andor:
5939 /* We only do these simplifications if we are optimizing. */
5940 if (!optimize)
5941 return t;
5943 /* Check for things like (A || B) && (A || C). We can convert this
5944 to A || (B && C). Note that either operator can be any of the four
5945 truth and/or operations and the transformation will still be
5946 valid. Also note that we only care about order for the
5947 ANDIF and ORIF operators. If B contains side effects, this
5948 might change the truth-value of A. */
5949 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5950 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5951 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5952 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5953 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5954 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5956 tree a00 = TREE_OPERAND (arg0, 0);
5957 tree a01 = TREE_OPERAND (arg0, 1);
5958 tree a10 = TREE_OPERAND (arg1, 0);
5959 tree a11 = TREE_OPERAND (arg1, 1);
5960 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5961 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5962 && (code == TRUTH_AND_EXPR
5963 || code == TRUTH_OR_EXPR));
5965 if (operand_equal_p (a00, a10, 0))
5966 return fold (build (TREE_CODE (arg0), type, a00,
5967 fold (build (code, type, a01, a11))));
5968 else if (commutative && operand_equal_p (a00, a11, 0))
5969 return fold (build (TREE_CODE (arg0), type, a00,
5970 fold (build (code, type, a01, a10))));
5971 else if (commutative && operand_equal_p (a01, a10, 0))
5972 return fold (build (TREE_CODE (arg0), type, a01,
5973 fold (build (code, type, a00, a11))));
5975 /* This case if tricky because we must either have commutative
5976 operators or else A10 must not have side-effects. */
5978 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5979 && operand_equal_p (a01, a11, 0))
5980 return fold (build (TREE_CODE (arg0), type,
5981 fold (build (code, type, a00, a10)),
5982 a01));
5985 /* See if we can build a range comparison. */
5986 if (0 != (tem = fold_range_test (t)))
5987 return tem;
5989 /* Check for the possibility of merging component references. If our
5990 lhs is another similar operation, try to merge its rhs with our
5991 rhs. Then try to merge our lhs and rhs. */
5992 if (TREE_CODE (arg0) == code
5993 && 0 != (tem = fold_truthop (code, type,
5994 TREE_OPERAND (arg0, 1), arg1)))
5995 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5997 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5998 return tem;
6000 return t;
6002 case TRUTH_ORIF_EXPR:
6003 /* Note that the operands of this must be ints
6004 and their values must be 0 or true.
6005 ("true" is a fixed value perhaps depending on the language.) */
6006 /* If first arg is constant true, return it. */
6007 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6008 return convert (type, arg0);
6009 case TRUTH_OR_EXPR:
6010 /* If either arg is constant zero, drop it. */
6011 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6012 return non_lvalue (convert (type, arg1));
6013 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6014 /* Preserve sequence points. */
6015 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6016 return non_lvalue (convert (type, arg0));
6017 /* If second arg is constant true, result is true, but we must
6018 evaluate first arg. */
6019 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6020 return omit_one_operand (type, arg1, arg0);
6021 /* Likewise for first arg, but note this only occurs here for
6022 TRUTH_OR_EXPR. */
6023 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6024 return omit_one_operand (type, arg0, arg1);
6025 goto truth_andor;
6027 case TRUTH_XOR_EXPR:
6028 /* If either arg is constant zero, drop it. */
6029 if (integer_zerop (arg0))
6030 return non_lvalue (convert (type, arg1));
6031 if (integer_zerop (arg1))
6032 return non_lvalue (convert (type, arg0));
6033 /* If either arg is constant true, this is a logical inversion. */
6034 if (integer_onep (arg0))
6035 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6036 if (integer_onep (arg1))
6037 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6038 return t;
6040 case EQ_EXPR:
6041 case NE_EXPR:
6042 case LT_EXPR:
6043 case GT_EXPR:
6044 case LE_EXPR:
6045 case GE_EXPR:
6046 /* If one arg is a real or integer constant, put it last. */
6047 if ((TREE_CODE (arg0) == INTEGER_CST
6048 && TREE_CODE (arg1) != INTEGER_CST)
6049 || (TREE_CODE (arg0) == REAL_CST
6050 && TREE_CODE (arg0) != REAL_CST))
6052 TREE_OPERAND (t, 0) = arg1;
6053 TREE_OPERAND (t, 1) = arg0;
6054 arg0 = TREE_OPERAND (t, 0);
6055 arg1 = TREE_OPERAND (t, 1);
6056 code = swap_tree_comparison (code);
6057 TREE_SET_CODE (t, code);
6060 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6062 tree targ0 = strip_float_extensions (arg0);
6063 tree targ1 = strip_float_extensions (arg1);
6064 tree newtype = TREE_TYPE (targ0);
6066 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6067 newtype = TREE_TYPE (targ1);
6069 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6070 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6071 return fold (build (code, type, convert (newtype, targ0),
6072 convert (newtype, targ1)));
6074 /* (-a) CMP (-b) -> b CMP a */
6075 if (TREE_CODE (arg0) == NEGATE_EXPR
6076 && TREE_CODE (arg1) == NEGATE_EXPR)
6077 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6078 TREE_OPERAND (arg0, 0)));
6079 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6080 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6081 return
6082 fold (build
6083 (swap_tree_comparison (code), type,
6084 TREE_OPERAND (arg0, 0),
6085 build_real (TREE_TYPE (arg1),
6086 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6087 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6088 /* a CMP (-0) -> a CMP 0 */
6089 if (TREE_CODE (arg1) == REAL_CST
6090 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6091 return fold (build (code, type, arg0,
6092 build_real (TREE_TYPE (arg1), dconst0)));
6094 /* If this is a comparison of a real constant with a PLUS_EXPR
6095 or a MINUS_EXPR of a real constant, we can convert it into a
6096 comparison with a revised real constant as long as no overflow
6097 occurs when unsafe_math_optimizations are enabled. */
6098 if (flag_unsafe_math_optimizations
6099 && TREE_CODE (arg1) == REAL_CST
6100 && (TREE_CODE (arg0) == PLUS_EXPR
6101 || TREE_CODE (arg0) == MINUS_EXPR)
6102 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6103 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6104 ? MINUS_EXPR : PLUS_EXPR,
6105 arg1, TREE_OPERAND (arg0, 1), 0))
6106 && ! TREE_CONSTANT_OVERFLOW (tem))
6107 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6110 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6111 First, see if one arg is constant; find the constant arg
6112 and the other one. */
6114 tree constop = 0, varop = NULL_TREE;
6115 int constopnum = -1;
6117 if (TREE_CONSTANT (arg1))
6118 constopnum = 1, constop = arg1, varop = arg0;
6119 if (TREE_CONSTANT (arg0))
6120 constopnum = 0, constop = arg0, varop = arg1;
6122 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6124 /* This optimization is invalid for ordered comparisons
6125 if CONST+INCR overflows or if foo+incr might overflow.
6126 This optimization is invalid for floating point due to rounding.
6127 For pointer types we assume overflow doesn't happen. */
6128 if (POINTER_TYPE_P (TREE_TYPE (varop))
6129 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6130 && (code == EQ_EXPR || code == NE_EXPR)))
6132 tree newconst
6133 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6134 constop, TREE_OPERAND (varop, 1)));
6136 /* Do not overwrite the current varop to be a preincrement,
6137 create a new node so that we won't confuse our caller who
6138 might create trees and throw them away, reusing the
6139 arguments that they passed to build. This shows up in
6140 the THEN or ELSE parts of ?: being postincrements. */
6141 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6142 TREE_OPERAND (varop, 0),
6143 TREE_OPERAND (varop, 1));
6145 /* If VAROP is a reference to a bitfield, we must mask
6146 the constant by the width of the field. */
6147 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6148 && DECL_BIT_FIELD(TREE_OPERAND
6149 (TREE_OPERAND (varop, 0), 1)))
6151 int size
6152 = TREE_INT_CST_LOW (DECL_SIZE
6153 (TREE_OPERAND
6154 (TREE_OPERAND (varop, 0), 1)));
6155 tree mask, unsigned_type;
6156 unsigned int precision;
6157 tree folded_compare;
6159 /* First check whether the comparison would come out
6160 always the same. If we don't do that we would
6161 change the meaning with the masking. */
6162 if (constopnum == 0)
6163 folded_compare = fold (build (code, type, constop,
6164 TREE_OPERAND (varop, 0)));
6165 else
6166 folded_compare = fold (build (code, type,
6167 TREE_OPERAND (varop, 0),
6168 constop));
6169 if (integer_zerop (folded_compare)
6170 || integer_onep (folded_compare))
6171 return omit_one_operand (type, folded_compare, varop);
6173 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6174 precision = TYPE_PRECISION (unsigned_type);
6175 mask = build_int_2 (~0, ~0);
6176 TREE_TYPE (mask) = unsigned_type;
6177 force_fit_type (mask, 0);
6178 mask = const_binop (RSHIFT_EXPR, mask,
6179 size_int (precision - size), 0);
6180 newconst = fold (build (BIT_AND_EXPR,
6181 TREE_TYPE (varop), newconst,
6182 convert (TREE_TYPE (varop),
6183 mask)));
6186 t = build (code, type,
6187 (constopnum == 0) ? newconst : varop,
6188 (constopnum == 1) ? newconst : varop);
6189 return t;
6192 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6194 if (POINTER_TYPE_P (TREE_TYPE (varop))
6195 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6196 && (code == EQ_EXPR || code == NE_EXPR)))
6198 tree newconst
6199 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6200 constop, TREE_OPERAND (varop, 1)));
6202 /* Do not overwrite the current varop to be a predecrement,
6203 create a new node so that we won't confuse our caller who
6204 might create trees and throw them away, reusing the
6205 arguments that they passed to build. This shows up in
6206 the THEN or ELSE parts of ?: being postdecrements. */
6207 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6208 TREE_OPERAND (varop, 0),
6209 TREE_OPERAND (varop, 1));
6211 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6212 && DECL_BIT_FIELD(TREE_OPERAND
6213 (TREE_OPERAND (varop, 0), 1)))
6215 int size
6216 = TREE_INT_CST_LOW (DECL_SIZE
6217 (TREE_OPERAND
6218 (TREE_OPERAND (varop, 0), 1)));
6219 tree mask, unsigned_type;
6220 unsigned int precision;
6221 tree folded_compare;
6223 if (constopnum == 0)
6224 folded_compare = fold (build (code, type, constop,
6225 TREE_OPERAND (varop, 0)));
6226 else
6227 folded_compare = fold (build (code, type,
6228 TREE_OPERAND (varop, 0),
6229 constop));
6230 if (integer_zerop (folded_compare)
6231 || integer_onep (folded_compare))
6232 return omit_one_operand (type, folded_compare, varop);
6234 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6235 precision = TYPE_PRECISION (unsigned_type);
6236 mask = build_int_2 (~0, ~0);
6237 TREE_TYPE (mask) = TREE_TYPE (varop);
6238 force_fit_type (mask, 0);
6239 mask = const_binop (RSHIFT_EXPR, mask,
6240 size_int (precision - size), 0);
6241 newconst = fold (build (BIT_AND_EXPR,
6242 TREE_TYPE (varop), newconst,
6243 convert (TREE_TYPE (varop),
6244 mask)));
6247 t = build (code, type,
6248 (constopnum == 0) ? newconst : varop,
6249 (constopnum == 1) ? newconst : varop);
6250 return t;
6255 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6256 This transformation affects the cases which are handled in later
6257 optimizations involving comparisons with non-negative constants. */
6258 if (TREE_CODE (arg1) == INTEGER_CST
6259 && TREE_CODE (arg0) != INTEGER_CST
6260 && tree_int_cst_sgn (arg1) > 0)
6262 switch (code)
6264 case GE_EXPR:
6265 code = GT_EXPR;
6266 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6267 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6268 break;
6270 case LT_EXPR:
6271 code = LE_EXPR;
6272 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6273 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6274 break;
6276 default:
6277 break;
6281 /* Comparisons with the highest or lowest possible integer of
6282 the specified size will have known values. */
6284 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6286 if (TREE_CODE (arg1) == INTEGER_CST
6287 && ! TREE_CONSTANT_OVERFLOW (arg1)
6288 && width <= HOST_BITS_PER_WIDE_INT
6289 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6290 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6292 unsigned HOST_WIDE_INT signed_max;
6293 unsigned HOST_WIDE_INT max, min;
6295 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6297 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6299 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6300 min = 0;
6302 else
6304 max = signed_max;
6305 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6308 if (TREE_INT_CST_HIGH (arg1) == 0
6309 && TREE_INT_CST_LOW (arg1) == max)
6310 switch (code)
6312 case GT_EXPR:
6313 return omit_one_operand (type,
6314 convert (type, integer_zero_node),
6315 arg0);
6316 case GE_EXPR:
6317 code = EQ_EXPR;
6318 TREE_SET_CODE (t, EQ_EXPR);
6319 break;
6320 case LE_EXPR:
6321 return omit_one_operand (type,
6322 convert (type, integer_one_node),
6323 arg0);
6324 case LT_EXPR:
6325 code = NE_EXPR;
6326 TREE_SET_CODE (t, NE_EXPR);
6327 break;
6329 /* The GE_EXPR and LT_EXPR cases above are not normally
6330 reached because of previous transformations. */
6332 default:
6333 break;
6335 else if (TREE_INT_CST_HIGH (arg1) == 0
6336 && TREE_INT_CST_LOW (arg1) == max - 1)
6337 switch (code)
6339 case GT_EXPR:
6340 code = EQ_EXPR;
6341 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6342 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6343 break;
6344 case LE_EXPR:
6345 code = NE_EXPR;
6346 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6347 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6348 break;
6349 default:
6350 break;
6352 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6353 && TREE_INT_CST_LOW (arg1) == min)
6354 switch (code)
6356 case LT_EXPR:
6357 return omit_one_operand (type,
6358 convert (type, integer_zero_node),
6359 arg0);
6360 case LE_EXPR:
6361 code = EQ_EXPR;
6362 TREE_SET_CODE (t, EQ_EXPR);
6363 break;
6365 case GE_EXPR:
6366 return omit_one_operand (type,
6367 convert (type, integer_one_node),
6368 arg0);
6369 case GT_EXPR:
6370 code = NE_EXPR;
6371 TREE_SET_CODE (t, NE_EXPR);
6372 break;
6374 default:
6375 break;
6377 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6378 && TREE_INT_CST_LOW (arg1) == min + 1)
6379 switch (code)
6381 case GE_EXPR:
6382 code = NE_EXPR;
6383 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6384 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6385 break;
6386 case LT_EXPR:
6387 code = EQ_EXPR;
6388 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6389 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6390 break;
6391 default:
6392 break;
6395 else if (TREE_INT_CST_HIGH (arg1) == 0
6396 && TREE_INT_CST_LOW (arg1) == signed_max
6397 && TREE_UNSIGNED (TREE_TYPE (arg1))
6398 /* signed_type does not work on pointer types. */
6399 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6401 /* The following case also applies to X < signed_max+1
6402 and X >= signed_max+1 because previous transformations. */
6403 if (code == LE_EXPR || code == GT_EXPR)
6405 tree st0, st1;
6406 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6407 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6408 return fold
6409 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6410 type, convert (st0, arg0),
6411 convert (st1, integer_zero_node)));
6417 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6418 a MINUS_EXPR of a constant, we can convert it into a comparison with
6419 a revised constant as long as no overflow occurs. */
6420 if ((code == EQ_EXPR || code == NE_EXPR)
6421 && TREE_CODE (arg1) == INTEGER_CST
6422 && (TREE_CODE (arg0) == PLUS_EXPR
6423 || TREE_CODE (arg0) == MINUS_EXPR)
6424 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6425 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6426 ? MINUS_EXPR : PLUS_EXPR,
6427 arg1, TREE_OPERAND (arg0, 1), 0))
6428 && ! TREE_CONSTANT_OVERFLOW (tem))
6429 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6431 /* Similarly for a NEGATE_EXPR. */
6432 else if ((code == EQ_EXPR || code == NE_EXPR)
6433 && TREE_CODE (arg0) == NEGATE_EXPR
6434 && TREE_CODE (arg1) == INTEGER_CST
6435 && 0 != (tem = negate_expr (arg1))
6436 && TREE_CODE (tem) == INTEGER_CST
6437 && ! TREE_CONSTANT_OVERFLOW (tem))
6438 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6440 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6441 for !=. Don't do this for ordered comparisons due to overflow. */
6442 else if ((code == NE_EXPR || code == EQ_EXPR)
6443 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6444 return fold (build (code, type,
6445 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6447 /* If we are widening one operand of an integer comparison,
6448 see if the other operand is similarly being widened. Perhaps we
6449 can do the comparison in the narrower type. */
6450 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6451 && TREE_CODE (arg0) == NOP_EXPR
6452 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6453 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6454 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6455 || (TREE_CODE (t1) == INTEGER_CST
6456 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6457 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6459 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6460 constant, we can simplify it. */
6461 else if (TREE_CODE (arg1) == INTEGER_CST
6462 && (TREE_CODE (arg0) == MIN_EXPR
6463 || TREE_CODE (arg0) == MAX_EXPR)
6464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6465 return optimize_minmax_comparison (t);
6467 /* If we are comparing an ABS_EXPR with a constant, we can
6468 convert all the cases into explicit comparisons, but they may
6469 well not be faster than doing the ABS and one comparison.
6470 But ABS (X) <= C is a range comparison, which becomes a subtraction
6471 and a comparison, and is probably faster. */
6472 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6473 && TREE_CODE (arg0) == ABS_EXPR
6474 && ! TREE_SIDE_EFFECTS (arg0)
6475 && (0 != (tem = negate_expr (arg1)))
6476 && TREE_CODE (tem) == INTEGER_CST
6477 && ! TREE_CONSTANT_OVERFLOW (tem))
6478 return fold (build (TRUTH_ANDIF_EXPR, type,
6479 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6480 build (LE_EXPR, type,
6481 TREE_OPERAND (arg0, 0), arg1)));
6483 /* If this is an EQ or NE comparison with zero and ARG0 is
6484 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6485 two operations, but the latter can be done in one less insn
6486 on machines that have only two-operand insns or on which a
6487 constant cannot be the first operand. */
6488 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6489 && TREE_CODE (arg0) == BIT_AND_EXPR)
6491 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6492 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6493 return
6494 fold (build (code, type,
6495 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6496 build (RSHIFT_EXPR,
6497 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6498 TREE_OPERAND (arg0, 1),
6499 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6500 convert (TREE_TYPE (arg0),
6501 integer_one_node)),
6502 arg1));
6503 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6504 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6505 return
6506 fold (build (code, type,
6507 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6508 build (RSHIFT_EXPR,
6509 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6510 TREE_OPERAND (arg0, 0),
6511 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6512 convert (TREE_TYPE (arg0),
6513 integer_one_node)),
6514 arg1));
6517 /* If this is an NE or EQ comparison of zero against the result of a
6518 signed MOD operation whose second operand is a power of 2, make
6519 the MOD operation unsigned since it is simpler and equivalent. */
6520 if ((code == NE_EXPR || code == EQ_EXPR)
6521 && integer_zerop (arg1)
6522 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6523 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6524 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6525 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6526 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6527 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6529 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6530 tree newmod = build (TREE_CODE (arg0), newtype,
6531 convert (newtype, TREE_OPERAND (arg0, 0)),
6532 convert (newtype, TREE_OPERAND (arg0, 1)));
6534 return build (code, type, newmod, convert (newtype, arg1));
6537 /* If this is an NE comparison of zero with an AND of one, remove the
6538 comparison since the AND will give the correct value. */
6539 if (code == NE_EXPR && integer_zerop (arg1)
6540 && TREE_CODE (arg0) == BIT_AND_EXPR
6541 && integer_onep (TREE_OPERAND (arg0, 1)))
6542 return convert (type, arg0);
6544 /* If we have (A & C) == C where C is a power of 2, convert this into
6545 (A & C) != 0. Similarly for NE_EXPR. */
6546 if ((code == EQ_EXPR || code == NE_EXPR)
6547 && TREE_CODE (arg0) == BIT_AND_EXPR
6548 && integer_pow2p (TREE_OPERAND (arg0, 1))
6549 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6550 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6551 arg0, integer_zero_node));
6553 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6554 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6555 if ((code == EQ_EXPR || code == NE_EXPR)
6556 && TREE_CODE (arg0) == BIT_AND_EXPR
6557 && integer_zerop (arg1))
6559 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6560 TREE_OPERAND (arg0, 1));
6561 if (arg00 != NULL_TREE)
6563 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6564 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6565 convert (stype, arg00),
6566 convert (stype, integer_zero_node)));
6570 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6571 and similarly for >= into !=. */
6572 if ((code == LT_EXPR || code == GE_EXPR)
6573 && TREE_UNSIGNED (TREE_TYPE (arg0))
6574 && TREE_CODE (arg1) == LSHIFT_EXPR
6575 && integer_onep (TREE_OPERAND (arg1, 0)))
6576 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6577 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6578 TREE_OPERAND (arg1, 1)),
6579 convert (TREE_TYPE (arg0), integer_zero_node));
6581 else if ((code == LT_EXPR || code == GE_EXPR)
6582 && TREE_UNSIGNED (TREE_TYPE (arg0))
6583 && (TREE_CODE (arg1) == NOP_EXPR
6584 || TREE_CODE (arg1) == CONVERT_EXPR)
6585 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6586 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6587 return
6588 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6589 convert (TREE_TYPE (arg0),
6590 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6591 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6592 convert (TREE_TYPE (arg0), integer_zero_node));
6594 /* Simplify comparison of something with itself. (For IEEE
6595 floating-point, we can only do some of these simplifications.) */
6596 if (operand_equal_p (arg0, arg1, 0))
6598 switch (code)
6600 case EQ_EXPR:
6601 case GE_EXPR:
6602 case LE_EXPR:
6603 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6604 return constant_boolean_node (1, type);
6605 code = EQ_EXPR;
6606 TREE_SET_CODE (t, code);
6607 break;
6609 case NE_EXPR:
6610 /* For NE, we can only do this simplification if integer. */
6611 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6612 break;
6613 /* ... fall through ... */
6614 case GT_EXPR:
6615 case LT_EXPR:
6616 return constant_boolean_node (0, type);
6617 default:
6618 abort ();
6622 /* If we are comparing an expression that just has comparisons
6623 of two integer values, arithmetic expressions of those comparisons,
6624 and constants, we can simplify it. There are only three cases
6625 to check: the two values can either be equal, the first can be
6626 greater, or the second can be greater. Fold the expression for
6627 those three values. Since each value must be 0 or 1, we have
6628 eight possibilities, each of which corresponds to the constant 0
6629 or 1 or one of the six possible comparisons.
6631 This handles common cases like (a > b) == 0 but also handles
6632 expressions like ((x > y) - (y > x)) > 0, which supposedly
6633 occur in macroized code. */
6635 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6637 tree cval1 = 0, cval2 = 0;
6638 int save_p = 0;
6640 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6641 /* Don't handle degenerate cases here; they should already
6642 have been handled anyway. */
6643 && cval1 != 0 && cval2 != 0
6644 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6645 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6646 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6647 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6648 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6649 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6650 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6652 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6653 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6655 /* We can't just pass T to eval_subst in case cval1 or cval2
6656 was the same as ARG1. */
6658 tree high_result
6659 = fold (build (code, type,
6660 eval_subst (arg0, cval1, maxval, cval2, minval),
6661 arg1));
6662 tree equal_result
6663 = fold (build (code, type,
6664 eval_subst (arg0, cval1, maxval, cval2, maxval),
6665 arg1));
6666 tree low_result
6667 = fold (build (code, type,
6668 eval_subst (arg0, cval1, minval, cval2, maxval),
6669 arg1));
6671 /* All three of these results should be 0 or 1. Confirm they
6672 are. Then use those values to select the proper code
6673 to use. */
6675 if ((integer_zerop (high_result)
6676 || integer_onep (high_result))
6677 && (integer_zerop (equal_result)
6678 || integer_onep (equal_result))
6679 && (integer_zerop (low_result)
6680 || integer_onep (low_result)))
6682 /* Make a 3-bit mask with the high-order bit being the
6683 value for `>', the next for '=', and the low for '<'. */
6684 switch ((integer_onep (high_result) * 4)
6685 + (integer_onep (equal_result) * 2)
6686 + integer_onep (low_result))
6688 case 0:
6689 /* Always false. */
6690 return omit_one_operand (type, integer_zero_node, arg0);
6691 case 1:
6692 code = LT_EXPR;
6693 break;
6694 case 2:
6695 code = EQ_EXPR;
6696 break;
6697 case 3:
6698 code = LE_EXPR;
6699 break;
6700 case 4:
6701 code = GT_EXPR;
6702 break;
6703 case 5:
6704 code = NE_EXPR;
6705 break;
6706 case 6:
6707 code = GE_EXPR;
6708 break;
6709 case 7:
6710 /* Always true. */
6711 return omit_one_operand (type, integer_one_node, arg0);
6714 t = build (code, type, cval1, cval2);
6715 if (save_p)
6716 return save_expr (t);
6717 else
6718 return fold (t);
6723 /* If this is a comparison of a field, we may be able to simplify it. */
6724 if (((TREE_CODE (arg0) == COMPONENT_REF
6725 && (*lang_hooks.can_use_bit_fields_p) ())
6726 || TREE_CODE (arg0) == BIT_FIELD_REF)
6727 && (code == EQ_EXPR || code == NE_EXPR)
6728 /* Handle the constant case even without -O
6729 to make sure the warnings are given. */
6730 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6732 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6733 return t1 ? t1 : t;
6736 /* If this is a comparison of complex values and either or both sides
6737 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6738 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6739 This may prevent needless evaluations. */
6740 if ((code == EQ_EXPR || code == NE_EXPR)
6741 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6742 && (TREE_CODE (arg0) == COMPLEX_EXPR
6743 || TREE_CODE (arg1) == COMPLEX_EXPR
6744 || TREE_CODE (arg0) == COMPLEX_CST
6745 || TREE_CODE (arg1) == COMPLEX_CST))
6747 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6748 tree real0, imag0, real1, imag1;
6750 arg0 = save_expr (arg0);
6751 arg1 = save_expr (arg1);
6752 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6753 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6754 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6755 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6757 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6758 : TRUTH_ORIF_EXPR),
6759 type,
6760 fold (build (code, type, real0, real1)),
6761 fold (build (code, type, imag0, imag1))));
6764 /* Optimize comparisons of strlen vs zero to a compare of the
6765 first character of the string vs zero. To wit,
6766 strlen(ptr) == 0 => *ptr == 0
6767 strlen(ptr) != 0 => *ptr != 0
6768 Other cases should reduce to one of these two (or a constant)
6769 due to the return value of strlen being unsigned. */
6770 if ((code == EQ_EXPR || code == NE_EXPR)
6771 && integer_zerop (arg1)
6772 && TREE_CODE (arg0) == CALL_EXPR
6773 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6775 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6776 tree arglist;
6778 if (TREE_CODE (fndecl) == FUNCTION_DECL
6779 && DECL_BUILT_IN (fndecl)
6780 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6781 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6782 && (arglist = TREE_OPERAND (arg0, 1))
6783 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6784 && ! TREE_CHAIN (arglist))
6785 return fold (build (code, type,
6786 build1 (INDIRECT_REF, char_type_node,
6787 TREE_VALUE(arglist)),
6788 integer_zero_node));
6791 /* From here on, the only cases we handle are when the result is
6792 known to be a constant.
6794 To compute GT, swap the arguments and do LT.
6795 To compute GE, do LT and invert the result.
6796 To compute LE, swap the arguments, do LT and invert the result.
6797 To compute NE, do EQ and invert the result.
6799 Therefore, the code below must handle only EQ and LT. */
6801 if (code == LE_EXPR || code == GT_EXPR)
6803 tem = arg0, arg0 = arg1, arg1 = tem;
6804 code = swap_tree_comparison (code);
6807 /* Note that it is safe to invert for real values here because we
6808 will check below in the one case that it matters. */
6810 t1 = NULL_TREE;
6811 invert = 0;
6812 if (code == NE_EXPR || code == GE_EXPR)
6814 invert = 1;
6815 code = invert_tree_comparison (code);
6818 /* Compute a result for LT or EQ if args permit;
6819 otherwise return T. */
6820 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6822 if (code == EQ_EXPR)
6823 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6824 else
6825 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6826 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6827 : INT_CST_LT (arg0, arg1)),
6831 #if 0 /* This is no longer useful, but breaks some real code. */
6832 /* Assume a nonexplicit constant cannot equal an explicit one,
6833 since such code would be undefined anyway.
6834 Exception: on sysvr4, using #pragma weak,
6835 a label can come out as 0. */
6836 else if (TREE_CODE (arg1) == INTEGER_CST
6837 && !integer_zerop (arg1)
6838 && TREE_CONSTANT (arg0)
6839 && TREE_CODE (arg0) == ADDR_EXPR
6840 && code == EQ_EXPR)
6841 t1 = build_int_2 (0, 0);
6842 #endif
6843 /* Two real constants can be compared explicitly. */
6844 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6846 /* If either operand is a NaN, the result is false with two
6847 exceptions: First, an NE_EXPR is true on NaNs, but that case
6848 is already handled correctly since we will be inverting the
6849 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6850 or a GE_EXPR into a LT_EXPR, we must return true so that it
6851 will be inverted into false. */
6853 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6854 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6855 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6857 else if (code == EQ_EXPR)
6858 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6859 TREE_REAL_CST (arg1)),
6861 else
6862 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6863 TREE_REAL_CST (arg1)),
6867 if (t1 == NULL_TREE)
6868 return t;
6870 if (invert)
6871 TREE_INT_CST_LOW (t1) ^= 1;
6873 TREE_TYPE (t1) = type;
6874 if (TREE_CODE (type) == BOOLEAN_TYPE)
6875 return (*lang_hooks.truthvalue_conversion) (t1);
6876 return t1;
6878 case COND_EXPR:
6879 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6880 so all simple results must be passed through pedantic_non_lvalue. */
6881 if (TREE_CODE (arg0) == INTEGER_CST)
6882 return pedantic_non_lvalue
6883 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6884 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6885 return pedantic_omit_one_operand (type, arg1, arg0);
6887 /* If the second operand is zero, invert the comparison and swap
6888 the second and third operands. Likewise if the second operand
6889 is constant and the third is not or if the third operand is
6890 equivalent to the first operand of the comparison. */
6892 if (integer_zerop (arg1)
6893 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6894 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6895 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6896 TREE_OPERAND (t, 2),
6897 TREE_OPERAND (arg0, 1))))
6899 /* See if this can be inverted. If it can't, possibly because
6900 it was a floating-point inequality comparison, don't do
6901 anything. */
6902 tem = invert_truthvalue (arg0);
6904 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6906 t = build (code, type, tem,
6907 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6908 arg0 = tem;
6909 /* arg1 should be the first argument of the new T. */
6910 arg1 = TREE_OPERAND (t, 1);
6911 STRIP_NOPS (arg1);
6915 /* If we have A op B ? A : C, we may be able to convert this to a
6916 simpler expression, depending on the operation and the values
6917 of B and C. Signed zeros prevent all of these transformations,
6918 for reasons given above each one. */
6920 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6921 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6922 arg1, TREE_OPERAND (arg0, 1))
6923 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6925 tree arg2 = TREE_OPERAND (t, 2);
6926 enum tree_code comp_code = TREE_CODE (arg0);
6928 STRIP_NOPS (arg2);
6930 /* If we have A op 0 ? A : -A, consider applying the following
6931 transformations:
6933 A == 0? A : -A same as -A
6934 A != 0? A : -A same as A
6935 A >= 0? A : -A same as abs (A)
6936 A > 0? A : -A same as abs (A)
6937 A <= 0? A : -A same as -abs (A)
6938 A < 0? A : -A same as -abs (A)
6940 None of these transformations work for modes with signed
6941 zeros. If A is +/-0, the first two transformations will
6942 change the sign of the result (from +0 to -0, or vice
6943 versa). The last four will fix the sign of the result,
6944 even though the original expressions could be positive or
6945 negative, depending on the sign of A.
6947 Note that all these transformations are correct if A is
6948 NaN, since the two alternatives (A and -A) are also NaNs. */
6949 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6950 ? real_zerop (TREE_OPERAND (arg0, 1))
6951 : integer_zerop (TREE_OPERAND (arg0, 1)))
6952 && TREE_CODE (arg2) == NEGATE_EXPR
6953 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6954 switch (comp_code)
6956 case EQ_EXPR:
6957 return
6958 pedantic_non_lvalue
6959 (convert (type,
6960 negate_expr
6961 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6962 arg1))));
6963 case NE_EXPR:
6964 return pedantic_non_lvalue (convert (type, arg1));
6965 case GE_EXPR:
6966 case GT_EXPR:
6967 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6968 arg1 = convert ((*lang_hooks.types.signed_type)
6969 (TREE_TYPE (arg1)), arg1);
6970 return pedantic_non_lvalue
6971 (convert (type, fold (build1 (ABS_EXPR,
6972 TREE_TYPE (arg1), arg1))));
6973 case LE_EXPR:
6974 case LT_EXPR:
6975 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6976 arg1 = convert ((lang_hooks.types.signed_type)
6977 (TREE_TYPE (arg1)), arg1);
6978 return pedantic_non_lvalue
6979 (negate_expr (convert (type,
6980 fold (build1 (ABS_EXPR,
6981 TREE_TYPE (arg1),
6982 arg1)))));
6983 default:
6984 abort ();
6987 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6988 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6989 both transformations are correct when A is NaN: A != 0
6990 is then true, and A == 0 is false. */
6992 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6994 if (comp_code == NE_EXPR)
6995 return pedantic_non_lvalue (convert (type, arg1));
6996 else if (comp_code == EQ_EXPR)
6997 return pedantic_non_lvalue (convert (type, integer_zero_node));
7000 /* Try some transformations of A op B ? A : B.
7002 A == B? A : B same as B
7003 A != B? A : B same as A
7004 A >= B? A : B same as max (A, B)
7005 A > B? A : B same as max (B, A)
7006 A <= B? A : B same as min (A, B)
7007 A < B? A : B same as min (B, A)
7009 As above, these transformations don't work in the presence
7010 of signed zeros. For example, if A and B are zeros of
7011 opposite sign, the first two transformations will change
7012 the sign of the result. In the last four, the original
7013 expressions give different results for (A=+0, B=-0) and
7014 (A=-0, B=+0), but the transformed expressions do not.
7016 The first two transformations are correct if either A or B
7017 is a NaN. In the first transformation, the condition will
7018 be false, and B will indeed be chosen. In the case of the
7019 second transformation, the condition A != B will be true,
7020 and A will be chosen.
7022 The conversions to max() and min() are not correct if B is
7023 a number and A is not. The conditions in the original
7024 expressions will be false, so all four give B. The min()
7025 and max() versions would give a NaN instead. */
7026 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7027 arg2, TREE_OPERAND (arg0, 0)))
7029 tree comp_op0 = TREE_OPERAND (arg0, 0);
7030 tree comp_op1 = TREE_OPERAND (arg0, 1);
7031 tree comp_type = TREE_TYPE (comp_op0);
7033 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7034 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7036 comp_type = type;
7037 comp_op0 = arg1;
7038 comp_op1 = arg2;
7041 switch (comp_code)
7043 case EQ_EXPR:
7044 return pedantic_non_lvalue (convert (type, arg2));
7045 case NE_EXPR:
7046 return pedantic_non_lvalue (convert (type, arg1));
7047 case LE_EXPR:
7048 case LT_EXPR:
7049 /* In C++ a ?: expression can be an lvalue, so put the
7050 operand which will be used if they are equal first
7051 so that we can convert this back to the
7052 corresponding COND_EXPR. */
7053 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7054 return pedantic_non_lvalue
7055 (convert (type, fold (build (MIN_EXPR, comp_type,
7056 (comp_code == LE_EXPR
7057 ? comp_op0 : comp_op1),
7058 (comp_code == LE_EXPR
7059 ? comp_op1 : comp_op0)))));
7060 break;
7061 case GE_EXPR:
7062 case GT_EXPR:
7063 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7064 return pedantic_non_lvalue
7065 (convert (type, fold (build (MAX_EXPR, comp_type,
7066 (comp_code == GE_EXPR
7067 ? comp_op0 : comp_op1),
7068 (comp_code == GE_EXPR
7069 ? comp_op1 : comp_op0)))));
7070 break;
7071 default:
7072 abort ();
7076 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7077 we might still be able to simplify this. For example,
7078 if C1 is one less or one more than C2, this might have started
7079 out as a MIN or MAX and been transformed by this function.
7080 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7082 if (INTEGRAL_TYPE_P (type)
7083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7084 && TREE_CODE (arg2) == INTEGER_CST)
7085 switch (comp_code)
7087 case EQ_EXPR:
7088 /* We can replace A with C1 in this case. */
7089 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7090 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7091 TREE_OPERAND (t, 2));
7092 break;
7094 case LT_EXPR:
7095 /* If C1 is C2 + 1, this is min(A, C2). */
7096 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7097 && operand_equal_p (TREE_OPERAND (arg0, 1),
7098 const_binop (PLUS_EXPR, arg2,
7099 integer_one_node, 0), 1))
7100 return pedantic_non_lvalue
7101 (fold (build (MIN_EXPR, type, arg1, arg2)));
7102 break;
7104 case LE_EXPR:
7105 /* If C1 is C2 - 1, this is min(A, C2). */
7106 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7107 && operand_equal_p (TREE_OPERAND (arg0, 1),
7108 const_binop (MINUS_EXPR, arg2,
7109 integer_one_node, 0), 1))
7110 return pedantic_non_lvalue
7111 (fold (build (MIN_EXPR, type, arg1, arg2)));
7112 break;
7114 case GT_EXPR:
7115 /* If C1 is C2 - 1, this is max(A, C2). */
7116 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7117 && operand_equal_p (TREE_OPERAND (arg0, 1),
7118 const_binop (MINUS_EXPR, arg2,
7119 integer_one_node, 0), 1))
7120 return pedantic_non_lvalue
7121 (fold (build (MAX_EXPR, type, arg1, arg2)));
7122 break;
7124 case GE_EXPR:
7125 /* If C1 is C2 + 1, this is max(A, C2). */
7126 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7127 && operand_equal_p (TREE_OPERAND (arg0, 1),
7128 const_binop (PLUS_EXPR, arg2,
7129 integer_one_node, 0), 1))
7130 return pedantic_non_lvalue
7131 (fold (build (MAX_EXPR, type, arg1, arg2)));
7132 break;
7133 case NE_EXPR:
7134 break;
7135 default:
7136 abort ();
7140 /* If the second operand is simpler than the third, swap them
7141 since that produces better jump optimization results. */
7142 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7143 || TREE_CODE (arg1) == SAVE_EXPR)
7144 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7145 || DECL_P (TREE_OPERAND (t, 2))
7146 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7148 /* See if this can be inverted. If it can't, possibly because
7149 it was a floating-point inequality comparison, don't do
7150 anything. */
7151 tem = invert_truthvalue (arg0);
7153 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7155 t = build (code, type, tem,
7156 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7157 arg0 = tem;
7158 /* arg1 should be the first argument of the new T. */
7159 arg1 = TREE_OPERAND (t, 1);
7160 STRIP_NOPS (arg1);
7164 /* Convert A ? 1 : 0 to simply A. */
7165 if (integer_onep (TREE_OPERAND (t, 1))
7166 && integer_zerop (TREE_OPERAND (t, 2))
7167 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7168 call to fold will try to move the conversion inside
7169 a COND, which will recurse. In that case, the COND_EXPR
7170 is probably the best choice, so leave it alone. */
7171 && type == TREE_TYPE (arg0))
7172 return pedantic_non_lvalue (arg0);
7174 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7175 over COND_EXPR in cases such as floating point comparisons. */
7176 if (integer_zerop (TREE_OPERAND (t, 1))
7177 && integer_onep (TREE_OPERAND (t, 2))
7178 && truth_value_p (TREE_CODE (arg0)))
7179 return pedantic_non_lvalue (convert (type,
7180 invert_truthvalue (arg0)));
7182 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7183 operation is simply A & 2. */
7185 if (integer_zerop (TREE_OPERAND (t, 2))
7186 && TREE_CODE (arg0) == NE_EXPR
7187 && integer_zerop (TREE_OPERAND (arg0, 1))
7188 && integer_pow2p (arg1)
7189 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7190 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7191 arg1, 1))
7192 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7194 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7195 if (integer_zerop (TREE_OPERAND (t, 2))
7196 && truth_value_p (TREE_CODE (arg0))
7197 && truth_value_p (TREE_CODE (arg1)))
7198 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7199 arg0, arg1)));
7201 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7202 if (integer_onep (TREE_OPERAND (t, 2))
7203 && truth_value_p (TREE_CODE (arg0))
7204 && truth_value_p (TREE_CODE (arg1)))
7206 /* Only perform transformation if ARG0 is easily inverted. */
7207 tem = invert_truthvalue (arg0);
7208 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7209 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7210 tem, arg1)));
7213 return t;
7215 case COMPOUND_EXPR:
7216 /* When pedantic, a compound expression can be neither an lvalue
7217 nor an integer constant expression. */
7218 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7219 return t;
7220 /* Don't let (0, 0) be null pointer constant. */
7221 if (integer_zerop (arg1))
7222 return build1 (NOP_EXPR, type, arg1);
7223 return convert (type, arg1);
7225 case COMPLEX_EXPR:
7226 if (wins)
7227 return build_complex (type, arg0, arg1);
7228 return t;
7230 case REALPART_EXPR:
7231 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7232 return t;
7233 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7234 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7235 TREE_OPERAND (arg0, 1));
7236 else if (TREE_CODE (arg0) == COMPLEX_CST)
7237 return TREE_REALPART (arg0);
7238 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7239 return fold (build (TREE_CODE (arg0), type,
7240 fold (build1 (REALPART_EXPR, type,
7241 TREE_OPERAND (arg0, 0))),
7242 fold (build1 (REALPART_EXPR,
7243 type, TREE_OPERAND (arg0, 1)))));
7244 return t;
7246 case IMAGPART_EXPR:
7247 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7248 return convert (type, integer_zero_node);
7249 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7250 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7251 TREE_OPERAND (arg0, 0));
7252 else if (TREE_CODE (arg0) == COMPLEX_CST)
7253 return TREE_IMAGPART (arg0);
7254 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7255 return fold (build (TREE_CODE (arg0), type,
7256 fold (build1 (IMAGPART_EXPR, type,
7257 TREE_OPERAND (arg0, 0))),
7258 fold (build1 (IMAGPART_EXPR, type,
7259 TREE_OPERAND (arg0, 1)))));
7260 return t;
7262 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7263 appropriate. */
7264 case CLEANUP_POINT_EXPR:
7265 if (! has_cleanups (arg0))
7266 return TREE_OPERAND (t, 0);
7269 enum tree_code code0 = TREE_CODE (arg0);
7270 int kind0 = TREE_CODE_CLASS (code0);
7271 tree arg00 = TREE_OPERAND (arg0, 0);
7272 tree arg01;
7274 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7275 return fold (build1 (code0, type,
7276 fold (build1 (CLEANUP_POINT_EXPR,
7277 TREE_TYPE (arg00), arg00))));
7279 if (kind0 == '<' || kind0 == '2'
7280 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7281 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7282 || code0 == TRUTH_XOR_EXPR)
7284 arg01 = TREE_OPERAND (arg0, 1);
7286 if (TREE_CONSTANT (arg00)
7287 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7288 && ! has_cleanups (arg00)))
7289 return fold (build (code0, type, arg00,
7290 fold (build1 (CLEANUP_POINT_EXPR,
7291 TREE_TYPE (arg01), arg01))));
7293 if (TREE_CONSTANT (arg01))
7294 return fold (build (code0, type,
7295 fold (build1 (CLEANUP_POINT_EXPR,
7296 TREE_TYPE (arg00), arg00)),
7297 arg01));
7300 return t;
7303 case CALL_EXPR:
7304 /* Check for a built-in function. */
7305 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7306 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7307 == FUNCTION_DECL)
7308 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7310 tree tmp = fold_builtin (expr);
7311 if (tmp)
7312 return tmp;
7314 return t;
7316 default:
7317 return t;
7318 } /* switch (code) */
7321 /* Determine if first argument is a multiple of second argument. Return 0 if
7322 it is not, or we cannot easily determined it to be.
7324 An example of the sort of thing we care about (at this point; this routine
7325 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7326 fold cases do now) is discovering that
7328 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7330 is a multiple of
7332 SAVE_EXPR (J * 8)
7334 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7336 This code also handles discovering that
7338 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7340 is a multiple of 8 so we don't have to worry about dealing with a
7341 possible remainder.
7343 Note that we *look* inside a SAVE_EXPR only to determine how it was
7344 calculated; it is not safe for fold to do much of anything else with the
7345 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7346 at run time. For example, the latter example above *cannot* be implemented
7347 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7348 evaluation time of the original SAVE_EXPR is not necessarily the same at
7349 the time the new expression is evaluated. The only optimization of this
7350 sort that would be valid is changing
7352 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7354 divided by 8 to
7356 SAVE_EXPR (I) * SAVE_EXPR (J)
7358 (where the same SAVE_EXPR (J) is used in the original and the
7359 transformed version). */
7361 static int
7362 multiple_of_p (type, top, bottom)
7363 tree type;
7364 tree top;
7365 tree bottom;
7367 if (operand_equal_p (top, bottom, 0))
7368 return 1;
7370 if (TREE_CODE (type) != INTEGER_TYPE)
7371 return 0;
7373 switch (TREE_CODE (top))
7375 case MULT_EXPR:
7376 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7377 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7379 case PLUS_EXPR:
7380 case MINUS_EXPR:
7381 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7382 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7384 case LSHIFT_EXPR:
7385 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7387 tree op1, t1;
7389 op1 = TREE_OPERAND (top, 1);
7390 /* const_binop may not detect overflow correctly,
7391 so check for it explicitly here. */
7392 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7393 > TREE_INT_CST_LOW (op1)
7394 && TREE_INT_CST_HIGH (op1) == 0
7395 && 0 != (t1 = convert (type,
7396 const_binop (LSHIFT_EXPR, size_one_node,
7397 op1, 0)))
7398 && ! TREE_OVERFLOW (t1))
7399 return multiple_of_p (type, t1, bottom);
7401 return 0;
7403 case NOP_EXPR:
7404 /* Can't handle conversions from non-integral or wider integral type. */
7405 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7406 || (TYPE_PRECISION (type)
7407 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7408 return 0;
7410 /* .. fall through ... */
7412 case SAVE_EXPR:
7413 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7415 case INTEGER_CST:
7416 if (TREE_CODE (bottom) != INTEGER_CST
7417 || (TREE_UNSIGNED (type)
7418 && (tree_int_cst_sgn (top) < 0
7419 || tree_int_cst_sgn (bottom) < 0)))
7420 return 0;
7421 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7422 top, bottom, 0));
7424 default:
7425 return 0;
7429 /* Return true if `t' is known to be non-negative. */
7432 tree_expr_nonnegative_p (t)
7433 tree t;
7435 switch (TREE_CODE (t))
7437 case ABS_EXPR:
7438 case FFS_EXPR:
7439 return 1;
7440 case INTEGER_CST:
7441 return tree_int_cst_sgn (t) >= 0;
7442 case TRUNC_DIV_EXPR:
7443 case CEIL_DIV_EXPR:
7444 case FLOOR_DIV_EXPR:
7445 case ROUND_DIV_EXPR:
7446 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7447 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7448 case TRUNC_MOD_EXPR:
7449 case CEIL_MOD_EXPR:
7450 case FLOOR_MOD_EXPR:
7451 case ROUND_MOD_EXPR:
7452 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7453 case COND_EXPR:
7454 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7455 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7456 case COMPOUND_EXPR:
7457 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7458 case MIN_EXPR:
7459 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7460 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7461 case MAX_EXPR:
7462 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7463 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7464 case MODIFY_EXPR:
7465 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7466 case BIND_EXPR:
7467 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7468 case SAVE_EXPR:
7469 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7470 case NON_LVALUE_EXPR:
7471 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7472 case RTL_EXPR:
7473 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7475 default:
7476 if (truth_value_p (TREE_CODE (t)))
7477 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7478 return 1;
7479 else
7480 /* We don't know sign of `t', so be conservative and return false. */
7481 return 0;
7485 /* Return true if `r' is known to be non-negative.
7486 Only handles constants at the moment. */
7489 rtl_expr_nonnegative_p (r)
7490 rtx r;
7492 switch (GET_CODE (r))
7494 case CONST_INT:
7495 return INTVAL (r) >= 0;
7497 case CONST_DOUBLE:
7498 if (GET_MODE (r) == VOIDmode)
7499 return CONST_DOUBLE_HIGH (r) >= 0;
7500 return 0;
7502 case CONST_VECTOR:
7504 int units, i;
7505 rtx elt;
7507 units = CONST_VECTOR_NUNITS (r);
7509 for (i = 0; i < units; ++i)
7511 elt = CONST_VECTOR_ELT (r, i);
7512 if (!rtl_expr_nonnegative_p (elt))
7513 return 0;
7516 return 1;
7519 case SYMBOL_REF:
7520 case LABEL_REF:
7521 /* These are always nonnegative. */
7522 return 1;
7524 default:
7525 return 0;
7529 #include "gt-fold-const.h"