oops - minor formatting tidy ups to previous delta
[official-gcc.git] / gcc / fold-const.c
blob033dbfcfc62da4dedbab772bfd895878c9db949b
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "real.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "toplev.h"
54 #include "ggc.h"
55 #include "hashtab.h"
56 #include "langhooks.h"
/* Forward declarations of this file's static helper functions.
   PARAMS is GCC's pre-C99 prototype macro (expands the argument
   list away on compilers without prototypes).  */
58 static void encode PARAMS ((HOST_WIDE_INT *,
59 unsigned HOST_WIDE_INT,
60 HOST_WIDE_INT));
61 static void decode PARAMS ((HOST_WIDE_INT *,
62 unsigned HOST_WIDE_INT *,
63 HOST_WIDE_INT *));
64 static tree negate_expr PARAMS ((tree));
65 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
66 tree *, int));
67 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
68 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
70 static hashval_t size_htab_hash PARAMS ((const void *));
71 static int size_htab_eq PARAMS ((const void *, const void *));
72 static tree fold_convert PARAMS ((tree, tree));
73 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75 static int comparison_to_compcode PARAMS ((enum tree_code));
76 static enum tree_code compcode_to_comparison PARAMS ((int));
77 static int truth_value_p PARAMS ((enum tree_code));
78 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
79 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
80 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
81 static tree omit_one_operand PARAMS ((tree, tree, tree));
82 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
83 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
84 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
85 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
86 tree, tree));
87 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
88 HOST_WIDE_INT *,
89 enum machine_mode *, int *,
90 int *, tree *, tree *));
91 static int all_ones_mask_p PARAMS ((tree, int));
92 static tree sign_bit_p PARAMS ((tree, tree));
93 static int simple_operand_p PARAMS ((tree));
94 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
95 tree, int));
96 static tree make_range PARAMS ((tree, int *, tree *, tree *));
97 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
98 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
99 int, tree, tree));
100 static tree fold_range_test PARAMS ((tree));
101 static tree unextend PARAMS ((tree, int, int, tree));
102 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
103 static tree optimize_minmax_comparison PARAMS ((tree));
104 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
105 static tree strip_compound_expr PARAMS ((tree, tree));
106 static int multiple_of_p PARAMS ((tree, tree, tree));
107 static tree constant_boolean_node PARAMS ((int, tree));
108 static int count_cond PARAMS ((tree, int));
109 static tree fold_binary_op_with_conditional_arg
110 PARAMS ((enum tree_code, tree, tree, tree, int));
111 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
113 /* The following constants represent a bit based encoding of GCC's
114 comparison operators. This encoding simplifies transformations
115 on relational comparison operators, such as AND and OR. */
/* LT (bit 0), EQ (bit 1) and GT (bit 2) are independent bits, so the
   compound codes are their unions: LE = LT|EQ, NE = LT|GT, GE = EQ|GT,
   TRUE = all three.  Combining comparisons is then just bitwise AND/OR.  */
116 #define COMPCODE_FALSE 0
117 #define COMPCODE_LT 1
118 #define COMPCODE_EQ 2
119 #define COMPCODE_LE 3
120 #define COMPCODE_GT 4
121 #define COMPCODE_NE 5
122 #define COMPCODE_GE 6
123 #define COMPCODE_TRUE 7
125 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
126 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
127 and SUM1. Then this yields nonzero if overflow occurred during the
128 addition.
130 Overflow occurs if A and B have the same sign, but A and SUM differ in
131 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
132 sign. */
133 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
135 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
136 We do that by representing the two-word integer in 4 words, with only
137 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
138 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* LOWPART/HIGHPART split one HOST_WIDE_INT into two half-width "digits";
   BASE is the radix of those digits (2^(HOST_BITS_PER_WIDE_INT/2)).  */
140 #define LOWPART(x) \
141 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
142 #define HIGHPART(x) \
143 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
144 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
146 /* Unpack a two-word integer into 4 words.
147 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
148 WORDS points to the array of HOST_WIDE_INTs. */
150 static void
151 encode (words, low, hi)
152 HOST_WIDE_INT *words;
153 unsigned HOST_WIDE_INT low;
154 HOST_WIDE_INT hi;
156 words[0] = LOWPART (low);
157 words[1] = HIGHPART (low);
158 words[2] = LOWPART (hi);
159 words[3] = HIGHPART (hi);
162 /* Pack an array of 4 words into a two-word integer.
163 WORDS points to the array of words.
164 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
166 static void
167 decode (words, low, hi)
168 HOST_WIDE_INT *words;
169 unsigned HOST_WIDE_INT *low;
170 HOST_WIDE_INT *hi;
172 *low = words[0] + words[1] * BASE;
173 *hi = words[2] + words[3] * BASE;
176 /* Make the integer constant T valid for its type by setting to 0 or 1 all
177 the bits in the constant that don't belong in the type.
179 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
180 nonzero, a signed overflow has already occurred in calculating T, so
181 propagate it.
183 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
184 if it exists. */
/* NOTE(review): this excerpt has dropped the blank and brace-only lines
   and the return-type line (presumably `int', since overflow flags are
   returned) -- the tokens below are otherwise unmodified.  */
187 force_fit_type (t, overflow)
188 tree t;
189 int overflow;
191 unsigned HOST_WIDE_INT low;
192 HOST_WIDE_INT high;
193 unsigned int prec;
/* Real constants are only range-checked, never modified here.  */
195 if (TREE_CODE (t) == REAL_CST)
197 #ifdef CHECK_FLOAT_VALUE
198 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
199 overflow);
200 #endif
201 return overflow;
204 else if (TREE_CODE (t) != INTEGER_CST)
205 return overflow;
/* Save the original value; signed overflow is detected at the end by
   comparing against the truncated/extended result.  */
207 low = TREE_INT_CST_LOW (t);
208 high = TREE_INT_CST_HIGH (t);
/* Pointer constants are fitted to the target pointer width rather than
   the nominal type precision.  */
210 if (POINTER_TYPE_P (TREE_TYPE (t)))
211 prec = POINTER_SIZE;
212 else
213 prec = TYPE_PRECISION (TREE_TYPE (t));
215 /* First clear all bits that are beyond the type's precision. */
/* A full two-word constant needs no masking at all.  */
217 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
219 else if (prec > HOST_BITS_PER_WIDE_INT)
220 TREE_INT_CST_HIGH (t)
221 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
222 else
224 TREE_INT_CST_HIGH (t) = 0;
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
229 /* Unsigned types do not suffer sign extension or overflow unless they
230 are a sizetype. */
231 if (TREE_UNSIGNED (TREE_TYPE (t))
232 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
233 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
234 return overflow;
236 /* If the value's sign bit is set, extend the sign. */
237 if (prec != 2 * HOST_BITS_PER_WIDE_INT
238 && (prec > HOST_BITS_PER_WIDE_INT
239 ? 0 != (TREE_INT_CST_HIGH (t)
240 & ((HOST_WIDE_INT) 1
241 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
242 : 0 != (TREE_INT_CST_LOW (t)
243 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
245 /* Value is negative:
246 set to 1 all the bits that are outside this type's precision. */
247 if (prec > HOST_BITS_PER_WIDE_INT)
248 TREE_INT_CST_HIGH (t)
249 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
250 else
252 TREE_INT_CST_HIGH (t) = -1;
253 if (prec < HOST_BITS_PER_WIDE_INT)
254 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
258 /* Return nonzero if signed overflow occurred. */
/* Any difference between the saved and the fitted value means the
   constant did not fit; fold that in with the incoming OVERFLOW flag.  */
259 return
260 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
261 != 0);
264 /* Add two doubleword integers with doubleword result.
265 Each argument is given as two `HOST_WIDE_INT' pieces.
266 One argument is L1 and H1; the other, L2 and H2.
267 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
270 add_double (l1, h1, l2, h2, lv, hv)
271 unsigned HOST_WIDE_INT l1, l2;
272 HOST_WIDE_INT h1, h2;
273 unsigned HOST_WIDE_INT *lv;
274 HOST_WIDE_INT *hv;
276 unsigned HOST_WIDE_INT l;
277 HOST_WIDE_INT h;
279 l = l1 + l2;
280 h = h1 + h2 + (l < l1);
282 *lv = l;
283 *hv = h;
284 return OVERFLOW_SUM_SIGN (h1, h2, h);
287 /* Negate a doubleword integer with doubleword result.
288 Return nonzero if the operation overflows, assuming it's signed.
289 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
290 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
293 neg_double (l1, h1, lv, hv)
294 unsigned HOST_WIDE_INT l1;
295 HOST_WIDE_INT h1;
296 unsigned HOST_WIDE_INT *lv;
297 HOST_WIDE_INT *hv;
299 if (l1 == 0)
301 *lv = 0;
302 *hv = - h1;
303 return (*hv & h1) < 0;
305 else
307 *lv = -l1;
308 *hv = ~h1;
309 return 0;
313 /* Multiply two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows, assuming it's signed.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the brace-only lines and the return-type line
   (presumably `int') were dropped from this excerpt; the remaining
   tokens are unmodified.  */
320 mul_double (l1, h1, l2, h2, lv, hv)
321 unsigned HOST_WIDE_INT l1, l2;
322 HOST_WIDE_INT h1, h2;
323 unsigned HOST_WIDE_INT *lv;
324 HOST_WIDE_INT *hv;
326 HOST_WIDE_INT arg1[4];
327 HOST_WIDE_INT arg2[4];
328 HOST_WIDE_INT prod[4 * 2];
329 unsigned HOST_WIDE_INT carry;
330 int i, j, k;
331 unsigned HOST_WIDE_INT toplow, neglow;
332 HOST_WIDE_INT tophigh, neghigh;
/* Split each operand into 4 half-word digits and do schoolbook
   multiplication on the digits; the 4x4 digit products fill an
   8-digit result.  */
334 encode (arg1, l1, h1);
335 encode (arg2, l2, h2);
337 memset ((char *) prod, 0, sizeof prod);
339 for (i = 0; i < 4; i++)
341 carry = 0;
342 for (j = 0; j < 4; j++)
344 k = i + j;
345 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
346 carry += arg1[i] * arg2[j];
347 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
348 carry += prod[k];
349 prod[k] = LOWPART (carry);
350 carry = HIGHPART (carry);
352 prod[i + 4] = carry;
355 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
357 /* Check for overflow by calculating the top half of the answer in full;
358 it should agree with the low half's sign bit. */
359 decode (prod + 4, &toplow, &tophigh);
/* The digit multiply above was unsigned; correct the top half for each
   negative operand by adding in the negation of the other operand.  */
360 if (h1 < 0)
362 neg_double (l2, h2, &neglow, &neghigh);
363 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
365 if (h2 < 0)
367 neg_double (l1, h1, &neglow, &neghigh);
368 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is all copies of the low half's sign bit:
   all ones for a negative result, all zeros otherwise.  */
370 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
373 /* Shift the doubleword integer in L1, H1 left by COUNT places
374 keeping only PREC bits of result.
375 Shift right if COUNT is negative.
376 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
377 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
379 void
380 lshift_double (l1, h1, count, prec, lv, hv, arith)
381 unsigned HOST_WIDE_INT l1;
382 HOST_WIDE_INT h1, count;
383 unsigned int prec;
384 unsigned HOST_WIDE_INT *lv;
385 HOST_WIDE_INT *hv;
386 int arith;
388 unsigned HOST_WIDE_INT signmask;
/* A negative count is delegated to the right-shift routine.  */
390 if (count < 0)
392 rshift_double (l1, h1, -count, prec, lv, hv, arith);
393 return;
396 #ifdef SHIFT_COUNT_TRUNCATED
397 if (SHIFT_COUNT_TRUNCATED)
398 count %= prec;
399 #endif
401 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
403 /* Shifting by the host word size is undefined according to the
404 ANSI standard, so we must handle this as a special case. */
405 *hv = 0;
406 *lv = 0;
408 else if (count >= HOST_BITS_PER_WIDE_INT)
410 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
411 *lv = 0;
413 else
/* The split `>> x >> 1' below avoids an undefined full-width shift
   when COUNT is 0.  */
415 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
416 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
417 *lv = l1 << count;
420 /* Sign extend all bits that are beyond the precision. */
/* SIGNMASK is all-ones if the PREC-bit result's sign bit is set,
   otherwise zero.  */
422 signmask = -((prec > HOST_BITS_PER_WIDE_INT
423 ? ((unsigned HOST_WIDE_INT) *hv
424 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
425 : (*lv >> (prec - 1))) & 1);
427 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
429 else if (prec >= HOST_BITS_PER_WIDE_INT)
431 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
432 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
434 else
436 *hv = signmask;
437 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
438 *lv |= signmask << prec;
442 /* Shift the doubleword integer in L1, H1 right by COUNT places
443 keeping only PREC bits of result. COUNT must be positive.
444 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
445 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
447 void
448 rshift_double (l1, h1, count, prec, lv, hv, arith)
449 unsigned HOST_WIDE_INT l1;
450 HOST_WIDE_INT h1, count;
451 unsigned int prec;
452 unsigned HOST_WIDE_INT *lv;
453 HOST_WIDE_INT *hv;
454 int arith;
456 unsigned HOST_WIDE_INT signmask;
/* All-ones when doing an arithmetic shift of a negative value,
   zero otherwise; used to fill the vacated high bits.  */
458 signmask = (arith
459 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
460 : 0);
462 #ifdef SHIFT_COUNT_TRUNCATED
463 if (SHIFT_COUNT_TRUNCATED)
464 count %= prec;
465 #endif
467 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
469 /* Shifting by the host word size is undefined according to the
470 ANSI standard, so we must handle this as a special case. */
471 *hv = 0;
472 *lv = 0;
474 else if (count >= HOST_BITS_PER_WIDE_INT)
476 *hv = 0;
477 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
479 else
481 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* The split `<< x << 1' avoids an undefined full-width shift when
   COUNT is 0.  */
482 *lv = ((l1 >> count)
483 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
486 /* Zero / sign extend all bits that are beyond the precision. */
488 if (count >= (HOST_WIDE_INT)prec)
490 *hv = signmask;
491 *lv = signmask;
493 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
495 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
497 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
498 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
500 else
502 *hv = signmask;
503 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
504 *lv |= signmask << (prec - count);
508 /* Rotate the doubleword integer in L1, H1 left by COUNT places
509 keeping only PREC bits of result.
510 Rotate right if COUNT is negative.
511 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
513 void
514 lrotate_double (l1, h1, count, prec, lv, hv)
515 unsigned HOST_WIDE_INT l1;
516 HOST_WIDE_INT h1, count;
517 unsigned int prec;
518 unsigned HOST_WIDE_INT *lv;
519 HOST_WIDE_INT *hv;
521 unsigned HOST_WIDE_INT s1l, s2l;
522 HOST_WIDE_INT s1h, s2h;
524 count %= prec;
525 if (count < 0)
526 count += prec;
528 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
529 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
530 *lv = s1l | s2l;
531 *hv = s1h | s2h;
534 /* Rotate the doubleword integer in L1, H1 left by COUNT places
535 keeping only PREC bits of result. COUNT must be positive.
536 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 void
539 rrotate_double (l1, h1, count, prec, lv, hv)
540 unsigned HOST_WIDE_INT l1;
541 HOST_WIDE_INT h1, count;
542 unsigned int prec;
543 unsigned HOST_WIDE_INT *lv;
544 HOST_WIDE_INT *hv;
546 unsigned HOST_WIDE_INT s1l, s2l;
547 HOST_WIDE_INT s1h, s2h;
549 count %= prec;
550 if (count < 0)
551 count += prec;
553 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
554 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 *lv = s1l | s2l;
556 *hv = s1h | s2h;
559 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
560 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
561 CODE is a tree code for a kind of division, one of
562 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
563 or EXACT_DIV_EXPR
564 It controls how the quotient is rounded to an integer.
565 Return nonzero if the operation overflows.
566 UNS nonzero says do unsigned division. */
/* NOTE(review): brace-only lines and the return-type line (presumably
   `int') were dropped from this excerpt; tokens below are unmodified.
   The core algorithm is Knuth's Algorithm D (TAOCP vol. 2) on
   half-word digits.  */
569 div_and_round_double (code, uns,
570 lnum_orig, hnum_orig, lden_orig, hden_orig,
571 lquo, hquo, lrem, hrem)
572 enum tree_code code;
573 int uns;
574 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
575 HOST_WIDE_INT hnum_orig;
576 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
577 HOST_WIDE_INT hden_orig;
578 unsigned HOST_WIDE_INT *lquo, *lrem;
579 HOST_WIDE_INT *hquo, *hrem;
581 int quo_neg = 0;
582 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
583 HOST_WIDE_INT den[4], quo[4];
584 int i, j;
585 unsigned HOST_WIDE_INT work;
586 unsigned HOST_WIDE_INT carry = 0;
587 unsigned HOST_WIDE_INT lnum = lnum_orig;
588 HOST_WIDE_INT hnum = hnum_orig;
589 unsigned HOST_WIDE_INT lden = lden_orig;
590 HOST_WIDE_INT hden = hden_orig;
591 int overflow = 0;
/* Division by zero: flag overflow and divide by 1 instead of trapping.  */
593 if (hden == 0 && lden == 0)
594 overflow = 1, lden = 1;
596 /* calculate quotient sign and convert operands to unsigned. */
597 if (!uns)
599 if (hnum < 0)
601 quo_neg = ~ quo_neg;
602 /* (minimum integer) / (-1) is the only overflow case. */
603 if (neg_double (lnum, hnum, &lnum, &hnum)
604 && ((HOST_WIDE_INT) lden & hden) == -1)
605 overflow = 1;
607 if (hden < 0)
609 quo_neg = ~ quo_neg;
610 neg_double (lden, hden, &lden, &hden);
614 if (hnum == 0 && hden == 0)
615 { /* single precision */
616 *hquo = *hrem = 0;
617 /* This unsigned division rounds toward zero. */
618 *lquo = lnum / lden;
619 goto finish_up;
622 if (hnum == 0)
623 { /* trivial case: dividend < divisor */
624 /* hden != 0 already checked. */
625 *hquo = *lquo = 0;
626 *hrem = hnum;
627 *lrem = lnum;
628 goto finish_up;
631 memset ((char *) quo, 0, sizeof quo);
633 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
634 memset ((char *) den, 0, sizeof den);
636 encode (num, lnum, hnum);
637 encode (den, lden, hden);
639 /* Special code for when the divisor < BASE. */
640 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
/* Single-digit divisor: simple digit-at-a-time long division.  */
642 /* hnum != 0 already checked. */
643 for (i = 4 - 1; i >= 0; i--)
645 work = num[i] + carry * BASE;
646 quo[i] = work / lden;
647 carry = work % lden;
650 else
652 /* Full double precision division,
653 with thanks to Don Knuth's "Seminumerical Algorithms". */
654 int num_hi_sig, den_hi_sig;
655 unsigned HOST_WIDE_INT quo_est, scale;
657 /* Find the highest non-zero divisor digit. */
658 for (i = 4 - 1;; i--)
659 if (den[i] != 0)
661 den_hi_sig = i;
662 break;
665 /* Insure that the first digit of the divisor is at least BASE/2.
666 This is required by the quotient digit estimation algorithm. */
668 scale = BASE / (den[den_hi_sig] + 1);
669 if (scale > 1)
670 { /* scale divisor and dividend */
671 carry = 0;
672 for (i = 0; i <= 4 - 1; i++)
674 work = (num[i] * scale) + carry;
675 num[i] = LOWPART (work);
676 carry = HIGHPART (work);
679 num[4] = carry;
680 carry = 0;
681 for (i = 0; i <= 4 - 1; i++)
683 work = (den[i] * scale) + carry;
684 den[i] = LOWPART (work);
685 carry = HIGHPART (work);
686 if (den[i] != 0) den_hi_sig = i;
690 num_hi_sig = 4;
692 /* Main loop */
693 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
695 /* Guess the next quotient digit, quo_est, by dividing the first
696 two remaining dividend digits by the high order quotient digit.
697 quo_est is never low and is at most 2 high. */
698 unsigned HOST_WIDE_INT tmp;
700 num_hi_sig = i + den_hi_sig + 1;
701 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
702 if (num[num_hi_sig] != den[den_hi_sig])
703 quo_est = work / den[den_hi_sig];
704 else
705 quo_est = BASE - 1;
707 /* Refine quo_est so it's usually correct, and at most one high. */
708 tmp = work - quo_est * den[den_hi_sig];
709 if (tmp < BASE
710 && (den[den_hi_sig - 1] * quo_est
711 > (tmp * BASE + num[num_hi_sig - 2])))
712 quo_est--;
714 /* Try QUO_EST as the quotient digit, by multiplying the
715 divisor by QUO_EST and subtracting from the remaining dividend.
716 Keep in mind that QUO_EST is the I - 1st digit. */
718 carry = 0;
719 for (j = 0; j <= den_hi_sig; j++)
721 work = quo_est * den[j] + carry;
722 carry = HIGHPART (work);
723 work = num[i + j] - LOWPART (work);
724 num[i + j] = LOWPART (work);
725 carry += HIGHPART (work) != 0;
728 /* If quo_est was high by one, then num[i] went negative and
729 we need to correct things. */
730 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
732 quo_est--;
733 carry = 0; /* add divisor back in */
734 for (j = 0; j <= den_hi_sig; j++)
736 work = num[i + j] + den[j] + carry;
737 carry = HIGHPART (work);
738 num[i + j] = LOWPART (work);
741 num [num_hi_sig] += carry;
744 /* Store the quotient digit. */
745 quo[i] = quo_est;
749 decode (quo, lquo, hquo);
751 finish_up:
752 /* if result is negative, make it so. */
753 if (quo_neg)
754 neg_double (*lquo, *hquo, lquo, hquo);
756 /* compute trial remainder: rem = num - (quo * den) */
757 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
758 neg_double (*lrem, *hrem, lrem, hrem);
759 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust the truncated quotient to the rounding mode requested by CODE;
   the remainder is recomputed afterwards from the final quotient.  */
761 switch (code)
763 case TRUNC_DIV_EXPR:
764 case TRUNC_MOD_EXPR: /* round toward zero */
765 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
766 return overflow;
768 case FLOOR_DIV_EXPR:
769 case FLOOR_MOD_EXPR: /* round toward negative infinity */
770 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
772 /* quo = quo - 1; */
773 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
774 lquo, hquo);
776 else
777 return overflow;
778 break;
780 case CEIL_DIV_EXPR:
781 case CEIL_MOD_EXPR: /* round toward positive infinity */
782 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
784 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
785 lquo, hquo);
787 else
788 return overflow;
789 break;
791 case ROUND_DIV_EXPR:
792 case ROUND_MOD_EXPR: /* round to closest integer */
794 unsigned HOST_WIDE_INT labs_rem = *lrem;
795 HOST_WIDE_INT habs_rem = *hrem;
796 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
797 HOST_WIDE_INT habs_den = hden, htwice;
799 /* Get absolute values */
800 if (*hrem < 0)
801 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
802 if (hden < 0)
803 neg_double (lden, hden, &labs_den, &habs_den);
805 /* If (2 * abs (lrem) >= abs (lden)) */
806 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
807 labs_rem, habs_rem, &ltwice, &htwice);
809 if (((unsigned HOST_WIDE_INT) habs_den
810 < (unsigned HOST_WIDE_INT) htwice)
811 || (((unsigned HOST_WIDE_INT) habs_den
812 == (unsigned HOST_WIDE_INT) htwice)
813 && (labs_den < ltwice)))
815 if (*hquo < 0)
816 /* quo = quo - 1; */
817 add_double (*lquo, *hquo,
818 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
819 else
820 /* quo = quo + 1; */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822 lquo, hquo);
824 else
825 return overflow;
827 break;
829 default:
830 abort ();
833 /* compute true remainder: rem = num - (quo * den) */
834 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
835 neg_double (*lrem, *hrem, lrem, hrem);
836 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
837 return overflow;
840 /* Given T, an expression, return the negation of T. Allow for T to be
841 null, in which case return null. */
842 
843 static tree
844 negate_expr (t)
845 tree t;
847 tree type;
848 tree tem;
850 if (t == 0)
851 return 0;
/* Remember the original type: STRIP_SIGN_NOPS may peel conversions off
   T, and the result must be converted back to it.  */
853 type = TREE_TYPE (t);
854 STRIP_SIGN_NOPS (t);
856 switch (TREE_CODE (t))
/* Constants fold directly unless the type is unsigned or folding
   would overflow.  */
858 case INTEGER_CST:
859 case REAL_CST:
860 if (! TREE_UNSIGNED (type)
861 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
862 && ! TREE_OVERFLOW (tem))
863 return tem;
864 break;
/* - (- A) -> A.  */
866 case NEGATE_EXPR:
867 return convert (type, TREE_OPERAND (t, 0));
869 case MINUS_EXPR:
870 /* - (A - B) -> B - A */
/* For floats this swap can change the rounding/NaN behavior, so it is
   only done under -funsafe-math-optimizations.  */
871 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
872 return convert (type,
873 fold (build (MINUS_EXPR, TREE_TYPE (t),
874 TREE_OPERAND (t, 1),
875 TREE_OPERAND (t, 0))));
876 break;
878 default:
879 break;
/* Fallback: build an explicit NEGATE_EXPR node and fold it.  */
882 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
885 /* Split a tree IN into a constant, literal and variable parts that could be
886 combined with CODE to make IN. "constant" means an expression with
887 TREE_CONSTANT but that isn't an actual constant. CODE must be a
888 commutative arithmetic operation. Store the constant part into *CONP,
889 the literal in *LITP and return the variable part. If a part isn't
890 present, set it to null. If the tree does not decompose in this way,
891 return the entire tree as the variable part and the other parts as null.
893 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
894 case, we negate an operand that was subtracted. Except if it is a
895 literal for which we use *MINUS_LITP instead.
897 If NEGATE_P is true, we are negating all of IN, again except a literal
898 for which we use *MINUS_LITP instead.
900 If IN is itself a literal or constant, return it as appropriate.
902 Note that we do not guarantee that any of the three values will be the
903 same type as IN, but they will have the same signedness and mode. */
905 static tree
906 split_tree (in, code, conp, litp, minus_litp, negate_p)
907 tree in;
908 enum tree_code code;
909 tree *conp, *litp, *minus_litp;
910 int negate_p;
912 tree var = 0;
914 *conp = 0;
915 *litp = 0;
916 *minus_litp = 0;
918 /* Strip any conversions that don't change the machine mode or signedness. */
919 STRIP_SIGN_NOPS (in);
921 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
922 *litp = in;
923 else if (TREE_CODE (in) == code
924 || (! FLOAT_TYPE_P (TREE_TYPE (in))
925 /* We can associate addition and subtraction together (even
926 though the C standard doesn't say so) for integers because
927 the value is not affected. For reals, the value might be
928 affected, so we can't. */
929 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
930 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
932 tree op0 = TREE_OPERAND (in, 0);
933 tree op1 = TREE_OPERAND (in, 1);
/* NEG1_P records whether the second operand of IN is subtracted;
   whichever part OP1 ends up in inherits that pending negation.  */
934 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
935 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
937 /* First see if either of the operands is a literal, then a constant. */
938 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
939 *litp = op0, op0 = 0;
940 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
941 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
943 if (op0 != 0 && TREE_CONSTANT (op0))
944 *conp = op0, op0 = 0;
945 else if (op1 != 0 && TREE_CONSTANT (op1))
946 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
948 /* If we haven't dealt with either operand, this is not a case we can
949 decompose. Otherwise, VAR is either of the ones remaining, if any. */
950 if (op0 != 0 && op1 != 0)
951 var = in;
952 else if (op0 != 0)
953 var = op0;
954 else
955 var = op1, neg_var_p = neg1_p;
957 /* Now do any needed negations. */
/* A subtracted literal is reported via *MINUS_LITP rather than being
   negated in place; constants and variables get real negate_expr calls.  */
958 if (neg_litp_p)
959 *minus_litp = *litp, *litp = 0;
960 if (neg_conp_p)
961 *conp = negate_expr (*conp);
962 if (neg_var_p)
963 var = negate_expr (var);
965 else if (TREE_CONSTANT (in))
966 *conp = in;
967 else
968 var = in;
/* Negating the whole expression: swap the literal between *LITP and
   *MINUS_LITP and negate the other two parts.  */
970 if (negate_p)
972 if (*litp)
973 *minus_litp = *litp, *litp = 0;
974 else if (*minus_litp)
975 *litp = *minus_litp, *minus_litp = 0;
976 *conp = negate_expr (*conp);
977 var = negate_expr (var);
980 return var;
983 /* Re-associate trees split by the above function. T1 and T2 are either
984 expressions to associate or null. Return the new expression, if any. If
985 we build an operation, do it in TYPE and with CODE. */
987 static tree
988 associate_trees (t1, t2, code, type)
989 tree t1, t2;
990 enum tree_code code;
991 tree type;
993 if (t1 == 0)
994 return t2;
995 else if (t2 == 0)
996 return t1;
998 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
999 try to fold this since we will have infinite recursion. But do
1000 deal with any NEGATE_EXPRs. */
1001 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1002 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1004 if (code == PLUS_EXPR)
1006 if (TREE_CODE (t1) == NEGATE_EXPR)
1007 return build (MINUS_EXPR, type, convert (type, t2),
1008 convert (type, TREE_OPERAND (t1, 0)));
1009 else if (TREE_CODE (t2) == NEGATE_EXPR)
1010 return build (MINUS_EXPR, type, convert (type, t1),
1011 convert (type, TREE_OPERAND (t2, 0)));
1013 return build (code, type, convert (type, t1), convert (type, t2));
1016 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1019 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1020 to produce a new constant.
1022 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1024 static tree
1025 int_const_binop (code, arg1, arg2, notrunc)
1026 enum tree_code code;
1027 tree arg1, arg2;
1028 int notrunc;
1030 unsigned HOST_WIDE_INT int1l, int2l;
1031 HOST_WIDE_INT int1h, int2h;
1032 unsigned HOST_WIDE_INT low;
1033 HOST_WIDE_INT hi;
1034 unsigned HOST_WIDE_INT garbagel;
1035 HOST_WIDE_INT garbageh;
1036 tree t;
1037 tree type = TREE_TYPE (arg1);
1038 int uns = TREE_UNSIGNED (type);
1039 int is_sizetype
1040 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1041 int overflow = 0;
1042 int no_overflow = 0;
/* Split each operand into its low and high HOST_WIDE_INT halves;
   all the arithmetic below is done on these double-word pairs.  */
1044 int1l = TREE_INT_CST_LOW (arg1);
1045 int1h = TREE_INT_CST_HIGH (arg1);
1046 int2l = TREE_INT_CST_LOW (arg2);
1047 int2h = TREE_INT_CST_HIGH (arg2);
1049 switch (code)
1051 case BIT_IOR_EXPR:
1052 low = int1l | int2l, hi = int1h | int2h;
1053 break;
1055 case BIT_XOR_EXPR:
1056 low = int1l ^ int2l, hi = int1h ^ int2h;
1057 break;
1059 case BIT_AND_EXPR:
1060 low = int1l & int2l, hi = int1h & int2h;
1061 break;
1063 case BIT_ANDTC_EXPR:
1064 low = int1l & ~int2l, hi = int1h & ~int2h;
1065 break;
/* A right shift is a left shift by a negated count; fall through.  */
1067 case RSHIFT_EXPR:
1068 int2l = -int2l;
1069 case LSHIFT_EXPR:
1070 /* It's unclear from the C standard whether shifts can overflow.
1071 The following code ignores overflow; perhaps a C standard
1072 interpretation ruling is needed. */
1073 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1074 &low, &hi, !uns);
1075 no_overflow = 1;
1076 break;
/* Likewise, a right rotate is a left rotate by a negated count.  */
1078 case RROTATE_EXPR:
1079 int2l = - int2l;
1080 case LROTATE_EXPR:
1081 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1082 &low, &hi);
1083 break;
1085 case PLUS_EXPR:
1086 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1087 break;
/* Subtraction is implemented as addition of the negation.  */
1089 case MINUS_EXPR:
1090 neg_double (int2l, int2h, &low, &hi);
1091 add_double (int1l, int1h, low, hi, &low, &hi);
1092 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1093 break;
1095 case MULT_EXPR:
1096 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1097 break;
1099 case TRUNC_DIV_EXPR:
1100 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1101 case EXACT_DIV_EXPR:
1102 /* This is a shortcut for a common special case. */
1103 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1104 && ! TREE_CONSTANT_OVERFLOW (arg1)
1105 && ! TREE_CONSTANT_OVERFLOW (arg2)
1106 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1108 if (code == CEIL_DIV_EXPR)
1109 int1l += int2l - 1;
1111 low = int1l / int2l, hi = 0;
1112 break;
1115 /* ... fall through ... */
1117 case ROUND_DIV_EXPR:
/* Dividing by one gives the dividend unchanged.  */
1118 if (int2h == 0 && int2l == 1)
1120 low = int1l, hi = int1h;
1121 break;
/* Dividing a nonzero value by itself gives one.  */
1123 if (int1l == int2l && int1h == int2h
1124 && ! (int1l == 0 && int1h == 0))
1126 low = 1, hi = 0;
1127 break;
/* General case: full double-word division; remainder is discarded.  */
1129 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1130 &low, &hi, &garbagel, &garbageh);
1131 break;
1133 case TRUNC_MOD_EXPR:
1134 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1135 /* This is a shortcut for a common special case. */
1136 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1137 && ! TREE_CONSTANT_OVERFLOW (arg1)
1138 && ! TREE_CONSTANT_OVERFLOW (arg2)
1139 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1141 if (code == CEIL_MOD_EXPR)
1142 int1l += int2l - 1;
1143 low = int1l % int2l, hi = 0;
1144 break;
1147 /* ... fall through ... */
1149 case ROUND_MOD_EXPR:
/* General case: the quotient is discarded, keeping the remainder.  */
1150 overflow = div_and_round_double (code, uns,
1151 int1l, int1h, int2l, int2h,
1152 &garbagel, &garbageh, &low, &hi);
1153 break;
1155 case MIN_EXPR:
1156 case MAX_EXPR:
/* First compute LOW = (ARG1 < ARG2), comparing the double-word pairs
   with the signedness of the type.  */
1157 if (uns)
1158 low = (((unsigned HOST_WIDE_INT) int1h
1159 < (unsigned HOST_WIDE_INT) int2h)
1160 || (((unsigned HOST_WIDE_INT) int1h
1161 == (unsigned HOST_WIDE_INT) int2h)
1162 && int1l < int2l));
1163 else
1164 low = (int1h < int2h
1165 || (int1h == int2h && int1l < int2l));
/* Pick ARG1 when (ARG1 < ARG2) matches wanting the minimum.  */
1167 if (low == (code == MIN_EXPR))
1168 low = int1l, hi = int1h;
1169 else
1170 low = int2l, hi = int2h;
1171 break;
1173 default:
1174 abort ();
1177 /* If this is for a sizetype, can be represented as one (signed)
1178 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1179 constants. */
1180 if (is_sizetype
1181 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1182 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1183 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1184 return size_int_type_wide (low, type);
1185 else
1187 t = build_int_2 (low, hi);
1188 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Propagate overflow: either from the arithmetic above (signed or
   sizetype only), from force_fit_type truncating the value (unless
   NOTRUNC), or from either input constant.  */
1191 TREE_OVERFLOW (t)
1192 = ((notrunc
1193 ? (!uns || is_sizetype) && overflow
1194 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1195 && ! no_overflow))
1196 | TREE_OVERFLOW (arg1)
1197 | TREE_OVERFLOW (arg2));
1199 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1200 So check if force_fit_type truncated the value. */
1201 if (is_sizetype
1202 && ! TREE_OVERFLOW (t)
1203 && (TREE_INT_CST_HIGH (t) != hi
1204 || TREE_INT_CST_LOW (t) != low))
1205 TREE_OVERFLOW (t) = 1;
1207 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1208 | TREE_CONSTANT_OVERFLOW (arg1)
1209 | TREE_CONSTANT_OVERFLOW (arg2));
1210 return t;
1213 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1214 constant. We assume ARG1 and ARG2 have the same data type, or at least
1215 are the same kind of constant and the same machine mode.
1217 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1219 static tree
1220 const_binop (code, arg1, arg2, notrunc)
1221 enum tree_code code;
1222 tree arg1, arg2;
1223 int notrunc;
/* Strip conversions that don't change the mode so the constant kind
   of ARG1 determines which folding path to take.  */
1225 STRIP_NOPS (arg1);
1226 STRIP_NOPS (arg2);
1228 if (TREE_CODE (arg1) == INTEGER_CST)
1229 return int_const_binop (code, arg1, arg2, notrunc);
1231 if (TREE_CODE (arg1) == REAL_CST)
1233 REAL_VALUE_TYPE d1;
1234 REAL_VALUE_TYPE d2;
1235 REAL_VALUE_TYPE value;
1236 tree t;
1238 d1 = TREE_REAL_CST (arg1);
1239 d2 = TREE_REAL_CST (arg2);
1241 /* If either operand is a NaN, just return it. Otherwise, set up
1242 for floating-point trap; we return an overflow. */
1243 if (REAL_VALUE_ISNAN (d1))
1244 return arg1;
1245 else if (REAL_VALUE_ISNAN (d2))
1246 return arg2;
1248 REAL_ARITHMETIC (value, code, d1, d2);
/* Truncate the result to the precision of ARG1's mode before
   building the REAL_CST node.  */
1250 t = build_real (TREE_TYPE (arg1),
1251 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
1252 value));
1254 TREE_OVERFLOW (t)
1255 = (force_fit_type (t, 0)
1256 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1257 TREE_CONSTANT_OVERFLOW (t)
1258 = TREE_OVERFLOW (t)
1259 | TREE_CONSTANT_OVERFLOW (arg1)
1260 | TREE_CONSTANT_OVERFLOW (arg2);
1261 return t;
1263 if (TREE_CODE (arg1) == COMPLEX_CST)
1265 tree type = TREE_TYPE (arg1);
1266 tree r1 = TREE_REALPART (arg1);
1267 tree i1 = TREE_IMAGPART (arg1);
1268 tree r2 = TREE_REALPART (arg2);
1269 tree i2 = TREE_IMAGPART (arg2);
1270 tree t;
1272 switch (code)
1274 case PLUS_EXPR:
1275 t = build_complex (type,
1276 const_binop (PLUS_EXPR, r1, r2, notrunc),
1277 const_binop (PLUS_EXPR, i1, i2, notrunc));
1278 break;
1280 case MINUS_EXPR:
1281 t = build_complex (type,
1282 const_binop (MINUS_EXPR, r1, r2, notrunc),
1283 const_binop (MINUS_EXPR, i1, i2, notrunc));
1284 break;
/* (r1 + i1*i) * (r2 + i2*i)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1286 case MULT_EXPR:
1287 t = build_complex (type,
1288 const_binop (MINUS_EXPR,
1289 const_binop (MULT_EXPR,
1290 r1, r2, notrunc),
1291 const_binop (MULT_EXPR,
1292 i1, i2, notrunc),
1293 notrunc),
1294 const_binop (PLUS_EXPR,
1295 const_binop (MULT_EXPR,
1296 r1, i2, notrunc),
1297 const_binop (MULT_EXPR,
1298 i1, r2, notrunc),
1299 notrunc));
1300 break;
/* Divide by multiplying numerator and denominator by the conjugate
   of the divisor; the denominator becomes |r2 + i2*i|^2.  */
1302 case RDIV_EXPR:
1304 tree magsquared
1305 = const_binop (PLUS_EXPR,
1306 const_binop (MULT_EXPR, r2, r2, notrunc),
1307 const_binop (MULT_EXPR, i2, i2, notrunc),
1308 notrunc);
1310 t = build_complex (type,
1311 const_binop
1312 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1313 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1314 const_binop (PLUS_EXPR,
1315 const_binop (MULT_EXPR, r1, r2,
1316 notrunc),
1317 const_binop (MULT_EXPR, i1, i2,
1318 notrunc),
1319 notrunc),
1320 magsquared, notrunc),
1321 const_binop
1322 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1323 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1324 const_binop (MINUS_EXPR,
1325 const_binop (MULT_EXPR, i1, r2,
1326 notrunc),
1327 const_binop (MULT_EXPR, r1, i2,
1328 notrunc),
1329 notrunc),
1330 magsquared, notrunc));
1332 break;
1334 default:
1335 abort ();
1337 return t;
/* ARG1 is not a constant kind this function can fold.  */
1339 return 0;
1342 /* These are the hash table functions for the hash table of INTEGER_CST
1343 nodes of a sizetype. */
1347 /* Return the hash code of X, an INTEGER_CST. */
1347 static hashval_t
1348 size_htab_hash (x)
1349 const void *x;
1351 tree t = (tree) x;
1353 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1354 ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
1355 ^ (TREE_OVERFLOW (t) << 20));
1358 /* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
1359 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1361 static int
1362 size_htab_eq (x, y)
1363 const void *x;
1364 const void *y;
1366 tree xt = (tree) x;
1367 tree yt = (tree) y;
1369 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1370 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1371 && TREE_TYPE (xt) == TREE_TYPE (yt)
1372 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1375 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1376 bits are given by NUMBER and of the sizetype represented by KIND. */
1378 tree
1379 size_int_wide (number, kind)
1380 HOST_WIDE_INT number;
1381 enum size_type_kind kind;
1383 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1386 /* Likewise, but the desired type is specified explicitly. */
/* Scratch INTEGER_CST node used as the lookup key by size_int_type_wide;
   it is inserted into the table and replaced when a lookup misses.  */
1388 static GTY (()) tree new_const;
/* Cache of sizetype INTEGER_CST nodes, keyed by value, type and overflow
   flag (see size_htab_hash and size_htab_eq above).  */
1389 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1390 htab_t size_htab;
/* Return an INTEGER_CST with value NUMBER and type TYPE, reusing a cached
   node when one with the same value/type/overflow already exists.  */
1392 tree
1393 size_int_type_wide (number, type)
1394 HOST_WIDE_INT number;
1395 tree type;
1397 PTR *slot;
/* Create the hash table and the first scratch node lazily.  */
1399 if (size_htab == 0)
1401 size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1402 new_const = make_node (INTEGER_CST);
1405 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1406 hash table, we return the value from the hash table. Otherwise, we
1407 place that in the hash table and make a new node for the next time. */
1408 TREE_INT_CST_LOW (new_const) = number;
/* Sign-extend NUMBER into the high word.  */
1409 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1410 TREE_TYPE (new_const) = type;
1411 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1412 = force_fit_type (new_const, 0);
1414 slot = htab_find_slot (size_htab, new_const, INSERT);
1415 if (*slot == 0)
/* Cache miss: the scratch node becomes the cached constant and a
   fresh scratch node is made for the next call.  */
1417 tree t = new_const;
1419 *slot = (PTR) new_const;
1420 new_const = make_node (INTEGER_CST);
1421 return t;
1423 else
1424 return (tree) *slot;
1427 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1428 is a tree code. The type of the result is taken from the operands.
1429 Both must be the same type integer type and it must be a size type.
1430 If the operands are constant, so is the result. */
1432 tree
1433 size_binop (code, arg0, arg1)
1434 enum tree_code code;
1435 tree arg0, arg1;
1437 tree type = TREE_TYPE (arg0);
1439 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1440 || type != TREE_TYPE (arg1))
1441 abort ();
1443 /* Handle the special case of two integer constants faster. */
1444 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1446 /* And some specific cases even faster than that. */
1447 if (code == PLUS_EXPR && integer_zerop (arg0))
1448 return arg1;
1449 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1450 && integer_zerop (arg1))
1451 return arg0;
1452 else if (code == MULT_EXPR && integer_onep (arg0))
1453 return arg1;
1455 /* Handle general case of two integer constants. */
1456 return int_const_binop (code, arg0, arg1, 0);
1459 if (arg0 == error_mark_node || arg1 == error_mark_node)
1460 return error_mark_node;
1462 return fold (build (code, type, arg0, arg1));
1465 /* Given two values, either both of sizetype or both of bitsizetype,
1466 compute the difference between the two values. Return the value
1467 in signed type corresponding to the type of the operands. */
1469 tree
1470 size_diffop (arg0, arg1)
1471 tree arg0, arg1;
1473 tree type = TREE_TYPE (arg0);
1474 tree ctype;
1476 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1477 || type != TREE_TYPE (arg1))
1478 abort ();
1480 /* If the type is already signed, just do the simple thing. */
1481 if (! TREE_UNSIGNED (type))
1482 return size_binop (MINUS_EXPR, arg0, arg1);
1484 ctype = (type == bitsizetype || type == ubitsizetype
1485 ? sbitsizetype : ssizetype);
1487 /* If either operand is not a constant, do the conversions to the signed
1488 type and subtract. The hardware will do the right thing with any
1489 overflow in the subtraction. */
1490 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1491 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1492 convert (ctype, arg1));
1494 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1495 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1496 overflow) and negate (which can't either). Special-case a result
1497 of zero while we're here. */
1498 if (tree_int_cst_equal (arg0, arg1))
1499 return convert (ctype, integer_zero_node);
1500 else if (tree_int_cst_lt (arg1, arg0))
1501 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1502 else
1503 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1504 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1508 /* Given T, a tree representing type conversion of ARG1, a constant,
1509 return a constant tree representing the result of conversion. */
1511 static tree
1512 fold_convert (t, arg1)
1513 tree t;
1514 tree arg1;
1516 tree type = TREE_TYPE (t);
1517 int overflow = 0;
/* Case 1: converting to a pointer or integral type.  */
1519 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1521 if (TREE_CODE (arg1) == INTEGER_CST)
1523 /* If we would build a constant wider than GCC supports,
1524 leave the conversion unfolded. */
1525 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1526 return t;
1528 /* If we are trying to make a sizetype for a small integer, use
1529 size_int to pick up cached types to reduce duplicate nodes. */
1530 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1531 && !TREE_CONSTANT_OVERFLOW (arg1)
1532 && compare_tree_int (arg1, 10000) < 0)
1533 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1535 /* Given an integer constant, make new constant with new type,
1536 appropriately sign-extended or truncated. */
1537 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1538 TREE_INT_CST_HIGH (arg1));
1539 TREE_TYPE (t) = type;
1540 /* Indicate an overflow if (1) ARG1 already overflowed,
1541 or (2) force_fit_type indicates an overflow.
1542 Tell force_fit_type that an overflow has already occurred
1543 if ARG1 is a too-large unsigned value and T is signed.
1544 But don't indicate an overflow if converting a pointer. */
1545 TREE_OVERFLOW (t)
1546 = ((force_fit_type (t,
1547 (TREE_INT_CST_HIGH (arg1) < 0
1548 && (TREE_UNSIGNED (type)
1549 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1550 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1551 || TREE_OVERFLOW (arg1));
1552 TREE_CONSTANT_OVERFLOW (t)
1553 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
/* Real-to-integer conversion: truncate toward zero, range-checking
   against the target type's bounds.  */
1555 else if (TREE_CODE (arg1) == REAL_CST)
1557 /* Don't initialize these, use assignments.
1558 Initialized local aggregates don't work on old compilers. */
1559 REAL_VALUE_TYPE x;
1560 REAL_VALUE_TYPE l;
1561 REAL_VALUE_TYPE u;
1562 tree type1 = TREE_TYPE (arg1);
1563 int no_upper_bound;
1565 x = TREE_REAL_CST (arg1);
1566 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1568 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1569 if (!no_upper_bound)
1570 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1572 /* See if X will be in range after truncation towards 0.
1573 To compensate for truncation, move the bounds away from 0,
1574 but reject if X exactly equals the adjusted bounds. */
1575 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1576 if (!no_upper_bound)
1577 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1578 /* If X is a NaN, use zero instead and show we have an overflow.
1579 Otherwise, range check. */
1580 if (REAL_VALUE_ISNAN (x))
1581 overflow = 1, x = dconst0;
1582 else if (! (REAL_VALUES_LESS (l, x)
1583 && !no_upper_bound
1584 && REAL_VALUES_LESS (x, u)))
1585 overflow = 1;
1588 HOST_WIDE_INT low, high;
1589 REAL_VALUE_TO_INT (&low, &high, x);
1590 t = build_int_2 (low, high);
1592 TREE_TYPE (t) = type;
1593 TREE_OVERFLOW (t)
1594 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1595 TREE_CONSTANT_OVERFLOW (t)
1596 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1598 TREE_TYPE (t) = type;
/* Case 2: converting to a floating-point type.  */
1600 else if (TREE_CODE (type) == REAL_TYPE)
1602 if (TREE_CODE (arg1) == INTEGER_CST)
1603 return build_real_from_int_cst (type, arg1);
1604 if (TREE_CODE (arg1) == REAL_CST)
/* NaNs are passed through unchanged, only retyped.  */
1606 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1608 /* We make a copy of ARG1 so that we don't modify an
1609 existing constant tree. */
1610 t = copy_node (arg1);
1611 TREE_TYPE (t) = type;
1612 return t;
1615 t = build_real (type,
1616 real_value_truncate (TYPE_MODE (type),
1617 TREE_REAL_CST (arg1)));
1619 TREE_OVERFLOW (t)
1620 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1621 TREE_CONSTANT_OVERFLOW (t)
1622 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1623 return t;
1626 TREE_CONSTANT (t) = 1;
1627 return t;
1630 /* Return an expr equal to X but certainly not valid as an lvalue. */
1632 tree
1633 non_lvalue (x)
1634 tree x;
1636 tree result;
1638 /* These things are certainly not lvalues. */
1639 if (TREE_CODE (x) == NON_LVALUE_EXPR
1640 || TREE_CODE (x) == INTEGER_CST
1641 || TREE_CODE (x) == REAL_CST
1642 || TREE_CODE (x) == STRING_CST
1643 || TREE_CODE (x) == ADDR_EXPR)
1644 return x;
1646 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1647 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1648 return result;
1651 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1652 Zero means allow extended lvalues. */
1654 int pedantic_lvalues;
1656 /* When pedantic, return an expr equal to X but certainly not valid as a
1657 pedantic lvalue. Otherwise, return X. */
1659 tree
1660 pedantic_non_lvalue (x)
1661 tree x;
1663 if (pedantic_lvalues)
1664 return non_lvalue (x);
1665 else
1666 return x;
1669 /* Given a tree comparison code, return the code that is the logical inverse
1670 of the given code. It is not safe to do this for floating-point
1671 comparisons, except for NE_EXPR and EQ_EXPR. */
1673 static enum tree_code
1674 invert_tree_comparison (code)
1675 enum tree_code code;
1677 switch (code)
1679 case EQ_EXPR:
1680 return NE_EXPR;
1681 case NE_EXPR:
1682 return EQ_EXPR;
1683 case GT_EXPR:
1684 return LE_EXPR;
1685 case GE_EXPR:
1686 return LT_EXPR;
1687 case LT_EXPR:
1688 return GE_EXPR;
1689 case LE_EXPR:
1690 return GT_EXPR;
1691 default:
1692 abort ();
1696 /* Similar, but return the comparison that results if the operands are
1697 swapped. This is safe for floating-point. */
1699 static enum tree_code
1700 swap_tree_comparison (code)
1701 enum tree_code code;
1703 switch (code)
1705 case EQ_EXPR:
1706 case NE_EXPR:
1707 return code;
1708 case GT_EXPR:
1709 return LT_EXPR;
1710 case GE_EXPR:
1711 return LE_EXPR;
1712 case LT_EXPR:
1713 return GT_EXPR;
1714 case LE_EXPR:
1715 return GE_EXPR;
1716 default:
1717 abort ();
1722 /* Convert a comparison tree code from an enum tree_code representation
1723 into a compcode bit-based encoding. This function is the inverse of
1724 compcode_to_comparison. */
1726 static int
1727 comparison_to_compcode (code)
1728 enum tree_code code;
1730 switch (code)
1732 case LT_EXPR:
1733 return COMPCODE_LT;
1734 case EQ_EXPR:
1735 return COMPCODE_EQ;
1736 case LE_EXPR:
1737 return COMPCODE_LE;
1738 case GT_EXPR:
1739 return COMPCODE_GT;
1740 case NE_EXPR:
1741 return COMPCODE_NE;
1742 case GE_EXPR:
1743 return COMPCODE_GE;
1744 default:
1745 abort ();
1749 /* Convert a compcode bit-based encoding of a comparison operator back
1750 to GCC's enum tree_code representation. This function is the
1751 inverse of comparison_to_compcode. */
1753 static enum tree_code
1754 compcode_to_comparison (code)
1755 int code;
1757 switch (code)
1759 case COMPCODE_LT:
1760 return LT_EXPR;
1761 case COMPCODE_EQ:
1762 return EQ_EXPR;
1763 case COMPCODE_LE:
1764 return LE_EXPR;
1765 case COMPCODE_GT:
1766 return GT_EXPR;
1767 case COMPCODE_NE:
1768 return NE_EXPR;
1769 case COMPCODE_GE:
1770 return GE_EXPR;
1771 default:
1772 abort ();
1776 /* Return nonzero if CODE is a tree code that represents a truth value. */
1778 static int
1779 truth_value_p (code)
1780 enum tree_code code;
1782 return (TREE_CODE_CLASS (code) == '<'
1783 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1784 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1785 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1788 /* Return nonzero if two operands are necessarily equal.
1789 If ONLY_CONST is non-zero, only return non-zero for constants.
1790 This function tests whether the operands are indistinguishable;
1791 it does not test whether they are equal using C's == operation.
1792 The distinction is important for IEEE floating point, because
1793 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1794 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1797 operand_equal_p (arg0, arg1, only_const)
1798 tree arg0, arg1;
1799 int only_const;
1801 /* If both types don't have the same signedness, then we can't consider
1802 them equal. We must check this before the STRIP_NOPS calls
1803 because they may change the signedness of the arguments. */
1804 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1805 return 0;
1807 STRIP_NOPS (arg0);
1808 STRIP_NOPS (arg1);
1810 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1811 /* This is needed for conversions and for COMPONENT_REF.
1812 Might as well play it safe and always test this. */
1813 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1814 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1815 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1816 return 0;
1818 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1819 We don't care about side effects in that case because the SAVE_EXPR
1820 takes care of that for us. In all other cases, two expressions are
1821 equal if they have no side effects. If we have two identical
1822 expressions with side effects that should be treated the same due
1823 to the only side effects being identical SAVE_EXPR's, that will
1824 be detected in the recursive calls below. */
1825 if (arg0 == arg1 && ! only_const
1826 && (TREE_CODE (arg0) == SAVE_EXPR
1827 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1828 return 1;
1830 /* Next handle constant cases, those for which we can return 1 even
1831 if ONLY_CONST is set. */
1832 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1833 switch (TREE_CODE (arg0))
/* Overflowed constants never compare equal to anything.  */
1835 case INTEGER_CST:
1836 return (! TREE_CONSTANT_OVERFLOW (arg0)
1837 && ! TREE_CONSTANT_OVERFLOW (arg1)
1838 && tree_int_cst_equal (arg0, arg1));
1840 case REAL_CST:
1841 return (! TREE_CONSTANT_OVERFLOW (arg0)
1842 && ! TREE_CONSTANT_OVERFLOW (arg1)
1843 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1844 TREE_REAL_CST (arg1)));
/* Vectors are equal when the element chains match pairwise.  */
1846 case VECTOR_CST:
1848 tree v1, v2;
1850 if (TREE_CONSTANT_OVERFLOW (arg0)
1851 || TREE_CONSTANT_OVERFLOW (arg1))
1852 return 0;
1854 v1 = TREE_VECTOR_CST_ELTS (arg0);
1855 v2 = TREE_VECTOR_CST_ELTS (arg1);
1856 while (v1 && v2)
1858 if (!operand_equal_p (v1, v2, only_const))
1859 return 0;
1860 v1 = TREE_CHAIN (v1);
1861 v2 = TREE_CHAIN (v2);
1864 return 1;
1867 case COMPLEX_CST:
1868 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1869 only_const)
1870 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1871 only_const));
1873 case STRING_CST:
1874 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1875 && ! memcmp (TREE_STRING_POINTER (arg0),
1876 TREE_STRING_POINTER (arg1),
1877 TREE_STRING_LENGTH (arg0)));
1879 case ADDR_EXPR:
1880 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
1882 default:
1883 break;
1886 if (only_const)
1887 return 0;
/* Recurse by tree-code class on non-constant operands.  */
1889 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1891 case '1':
1892 /* Two conversions are equal only if signedness and modes match. */
1893 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1894 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1895 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1896 return 0;
1898 return operand_equal_p (TREE_OPERAND (arg0, 0),
1899 TREE_OPERAND (arg1, 0), 0);
1901 case '<':
1902 case '2':
1903 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1904 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1906 return 1;
1908 /* For commutative ops, allow the other order. */
1909 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1910 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1911 || TREE_CODE (arg0) == BIT_IOR_EXPR
1912 || TREE_CODE (arg0) == BIT_XOR_EXPR
1913 || TREE_CODE (arg0) == BIT_AND_EXPR
1914 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1915 && operand_equal_p (TREE_OPERAND (arg0, 0),
1916 TREE_OPERAND (arg1, 1), 0)
1917 && operand_equal_p (TREE_OPERAND (arg0, 1),
1918 TREE_OPERAND (arg1, 0), 0));
1920 case 'r':
1921 /* If either of the pointer (or reference) expressions we are dereferencing
1922 contain a side effect, these cannot be equal. */
1923 if (TREE_SIDE_EFFECTS (arg0)
1924 || TREE_SIDE_EFFECTS (arg1))
1925 return 0;
1927 switch (TREE_CODE (arg0))
1929 case INDIRECT_REF:
1930 return operand_equal_p (TREE_OPERAND (arg0, 0),
1931 TREE_OPERAND (arg1, 0), 0);
1933 case COMPONENT_REF:
1934 case ARRAY_REF:
1935 case ARRAY_RANGE_REF:
1936 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1937 TREE_OPERAND (arg1, 0), 0)
1938 && operand_equal_p (TREE_OPERAND (arg0, 1),
1939 TREE_OPERAND (arg1, 1), 0));
/* A bit-field reference compares container, size and position.  */
1941 case BIT_FIELD_REF:
1942 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1943 TREE_OPERAND (arg1, 0), 0)
1944 && operand_equal_p (TREE_OPERAND (arg0, 1),
1945 TREE_OPERAND (arg1, 1), 0)
1946 && operand_equal_p (TREE_OPERAND (arg0, 2),
1947 TREE_OPERAND (arg1, 2), 0));
1948 default:
1949 return 0;
1952 case 'e':
1953 if (TREE_CODE (arg0) == RTL_EXPR)
1954 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1955 return 0;
1957 default:
1958 return 0;
1962 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1963 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1965 When in doubt, return 0. */
/* Return 1 if ARG0 might have been produced by shorten_compare from ARG1
   when ARG1 was being compared with OTHER; 0 when in doubt.  */
1967 static int
1968 operand_equal_for_comparison_p (arg0, arg1, other)
1969 tree arg0, arg1;
1970 tree other;
1972 int unsignedp1, unsignedpo;
1973 tree primarg0, primarg1, primother;
1974 unsigned int correct_width;
1976 if (operand_equal_p (arg0, arg1, 0))
1977 return 1;
/* The shorten_compare transformation only applies to integral types.  */
1979 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1980 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1981 return 0;
1983 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1984 and see if the inner values are the same. This removes any
1985 signedness comparison, which doesn't matter here. */
1986 primarg0 = arg0, primarg1 = arg1;
1987 STRIP_NOPS (primarg0);
1988 STRIP_NOPS (primarg1);
1989 if (operand_equal_p (primarg0, primarg1, 0))
1990 return 1;
1992 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1993 actual comparison operand, ARG0.
1995 First throw away any conversions to wider types
1996 already present in the operands. */
1998 primarg1 = get_narrower (arg1, &unsignedp1);
1999 primother = get_narrower (other, &unsignedpo);
2001 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2002 if (unsignedp1 == unsignedpo
2003 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2004 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2006 tree type = TREE_TYPE (arg0);
2008 /* Make sure shorter operand is extended the right way
2009 to match the longer operand. */
2010 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2011 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2013 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2014 return 1;
/* No match was found; play it safe.  */
2017 return 0;
2020 /* See if ARG is an expression that is either a comparison or is performing
2021 arithmetic on comparisons. The comparisons must only be comparing
2022 two different values, which will be stored in *CVAL1 and *CVAL2; if
2023 they are non-zero it means that some operands have already been found.
2024 No variables may be used anywhere else in the expression except in the
2025 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2026 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2028 If this is true, return 1. Otherwise, return zero. */
/* Return 1 if ARG is an expression made only of comparisons between two
   values (collected into *CVAL1 and *CVAL2) plus arithmetic on those
   comparisons; set *SAVE_P when a SAVE_EXPR was looked through.  */
2030 static int
2031 twoval_comparison_p (arg, cval1, cval2, save_p)
2032 tree arg;
2033 tree *cval1, *cval2;
2034 int *save_p;
2036 enum tree_code code = TREE_CODE (arg);
2037 char class = TREE_CODE_CLASS (code);
2039 /* We can handle some of the 'e' cases here. */
2040 if (class == 'e' && code == TRUTH_NOT_EXPR)
2041 class = '1';
2042 else if (class == 'e'
2043 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2044 || code == COMPOUND_EXPR))
2045 class = '2';
2047 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2048 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2050 /* If we've already found a CVAL1 or CVAL2, this expression is
2051 too complex to handle. */
2052 if (*cval1 || *cval2)
2053 return 0;
2055 class = '1';
2056 *save_p = 1;
2059 switch (class)
2061 case '1':
2062 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2064 case '2':
2065 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2066 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2067 cval1, cval2, save_p));
/* A bare constant contains no variables at all.  */
2069 case 'c':
2070 return 1;
2072 case 'e':
2073 if (code == COND_EXPR)
2074 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2075 cval1, cval2, save_p)
2076 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2077 cval1, cval2, save_p)
2078 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2079 cval1, cval2, save_p));
2080 return 0;
2082 case '<':
2083 /* First see if we can handle the first operand, then the second. For
2084 the second operand, we know *CVAL1 can't be zero. It must be that
2085 one side of the comparison is each of the values; test for the
2086 case where this isn't true by failing if the two operands
2087 are the same. */
2089 if (operand_equal_p (TREE_OPERAND (arg, 0),
2090 TREE_OPERAND (arg, 1), 0))
2091 return 0;
/* Record or match operand 0 against the collected values; matching
   branches deliberately do nothing.  */
2093 if (*cval1 == 0)
2094 *cval1 = TREE_OPERAND (arg, 0);
2095 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2097 else if (*cval2 == 0)
2098 *cval2 = TREE_OPERAND (arg, 0);
2099 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2101 else
2102 return 0;
/* Likewise for operand 1.  */
2104 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2106 else if (*cval2 == 0)
2107 *cval2 = TREE_OPERAND (arg, 1);
2108 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2110 else
2111 return 0;
2113 return 1;
2115 default:
2116 return 0;
2120 /* ARG is a tree that is known to contain just arithmetic operations and
2121 comparisons. Evaluate the operations in the tree substituting NEW0 for
2122 any occurrence of OLD0 as an operand of a comparison and likewise for
2123 NEW1 and OLD1. */
2125 static tree
2126 eval_subst (arg, old0, new0, old1, new1)
2127 tree arg;
2128 tree old0, new0, old1, new1;
2130 tree type = TREE_TYPE (arg);
2131 enum tree_code code = TREE_CODE (arg);
2132 char class = TREE_CODE_CLASS (code);
2134 /* We can handle some of the 'e' cases here. */
2135 if (class == 'e' && code == TRUTH_NOT_EXPR)
2136 class = '1';
2137 else if (class == 'e'
2138 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2139 class = '2';
2141 switch (class)
/* Unary and binary nodes: substitute in the operands and refold.  */
2143 case '1':
2144 return fold (build1 (code, type,
2145 eval_subst (TREE_OPERAND (arg, 0),
2146 old0, new0, old1, new1)));
2148 case '2':
2149 return fold (build (code, type,
2150 eval_subst (TREE_OPERAND (arg, 0),
2151 old0, new0, old1, new1),
2152 eval_subst (TREE_OPERAND (arg, 1),
2153 old0, new0, old1, new1)));
2155 case 'e':
2156 switch (code)
2158 case SAVE_EXPR:
2159 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
/* Only the value operand of a COMPOUND_EXPR matters here.  */
2161 case COMPOUND_EXPR:
2162 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2164 case COND_EXPR:
2165 return fold (build (code, type,
2166 eval_subst (TREE_OPERAND (arg, 0),
2167 old0, new0, old1, new1),
2168 eval_subst (TREE_OPERAND (arg, 1),
2169 old0, new0, old1, new1),
2170 eval_subst (TREE_OPERAND (arg, 2),
2171 old0, new0, old1, new1)));
2172 default:
2173 break;
2175 /* fall through - ??? */
2177 case '<':
2179 tree arg0 = TREE_OPERAND (arg, 0);
2180 tree arg1 = TREE_OPERAND (arg, 1);
2182 /* We need to check both for exact equality and tree equality. The
2183 former will be true if the operand has a side-effect. In that
2184 case, we know the operand occurred exactly once. */
2186 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2187 arg0 = new0;
2188 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2189 arg0 = new1;
2191 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2192 arg1 = new0;
2193 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2194 arg1 = new1;
2196 return fold (build (code, type, arg0, arg1));
/* Anything else is left untouched.  */
2199 default:
2200 return arg;
2204 /* Return a tree for the case when the result of an expression is RESULT
2205 converted to TYPE and OMITTED was previously an operand of the expression
2206 but is now not needed (e.g., we folded OMITTED * 0).
2208 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2209 the conversion of RESULT to TYPE. */
2211 static tree
2212 omit_one_operand (type, result, omitted)
2213 tree type, result, omitted;
2215 tree t = convert (type, result);
2217 if (TREE_SIDE_EFFECTS (omitted))
2218 return build (COMPOUND_EXPR, type, omitted, t);
2220 return non_lvalue (t);
2223 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2225 static tree
2226 pedantic_omit_one_operand (type, result, omitted)
2227 tree type, result, omitted;
2229 tree t = convert (type, result);
2231 if (TREE_SIDE_EFFECTS (omitted))
2232 return build (COMPOUND_EXPR, type, omitted, t);
2234 return pedantic_non_lvalue (t);
2237 /* Return a simplified tree node for the truth-negation of ARG. This
2238 never alters ARG itself. We assume that ARG is an operation that
2239 returns a truth value (0 or 1). */
/* invert_truthvalue: build the logical negation of truth-valued ARG
   without modifying ARG.  Comparisons are inverted directly, except
   ordered floating-point compares when unsafe-math is off (presumably
   because of NaN operands - the code only tests FLOAT_TYPE_P and
   flag_unsafe_math_optimizations); boolean connectives are negated via
   De Morgan; everything else is wrapped in TRUTH_NOT_EXPR.  */
2241 tree
2242 invert_truthvalue (arg)
2243 tree arg;
2245 tree type = TREE_TYPE (arg);
2246 enum tree_code code = TREE_CODE (arg);
2248 if (code == ERROR_MARK)
2249 return arg;
2251 /* If this is a comparison, we can simply invert it, except for
2252 floating-point non-equality comparisons, in which case we just
2253 enclose a TRUTH_NOT_EXPR around what we have. */
2255 if (TREE_CODE_CLASS (code) == '<')
2257 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2258 && !flag_unsafe_math_optimizations
2259 && code != NE_EXPR
2260 && code != EQ_EXPR)
2261 return build1 (TRUTH_NOT_EXPR, type, arg);
2262 else
2263 return build (invert_tree_comparison (code), type,
2264 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2267 switch (code)
/* A constant negates to 1 iff it was zero.  */
2269 case INTEGER_CST:
2270 return convert (type, build_int_2 (integer_zerop (arg), 0));
/* De Morgan on the four AND/OR forms; the short-circuit forms stay
   short-circuit so evaluation order is preserved.  */
2272 case TRUTH_AND_EXPR:
2273 return build (TRUTH_OR_EXPR, type,
2274 invert_truthvalue (TREE_OPERAND (arg, 0)),
2275 invert_truthvalue (TREE_OPERAND (arg, 1)));
2277 case TRUTH_OR_EXPR:
2278 return build (TRUTH_AND_EXPR, type,
2279 invert_truthvalue (TREE_OPERAND (arg, 0)),
2280 invert_truthvalue (TREE_OPERAND (arg, 1)));
2282 case TRUTH_XOR_EXPR:
2283 /* Here we can invert either operand. We invert the first operand
2284 unless the second operand is a TRUTH_NOT_EXPR in which case our
2285 result is the XOR of the first operand with the inside of the
2286 negation of the second operand. */
2288 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2289 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2290 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2291 else
2292 return build (TRUTH_XOR_EXPR, type,
2293 invert_truthvalue (TREE_OPERAND (arg, 0)),
2294 TREE_OPERAND (arg, 1));
2296 case TRUTH_ANDIF_EXPR:
2297 return build (TRUTH_ORIF_EXPR, type,
2298 invert_truthvalue (TREE_OPERAND (arg, 0)),
2299 invert_truthvalue (TREE_OPERAND (arg, 1)));
2301 case TRUTH_ORIF_EXPR:
2302 return build (TRUTH_ANDIF_EXPR, type,
2303 invert_truthvalue (TREE_OPERAND (arg, 0)),
2304 invert_truthvalue (TREE_OPERAND (arg, 1)));
2306 case TRUTH_NOT_EXPR:
2307 return TREE_OPERAND (arg, 0);
/* Push the negation into both arms of a conditional; the condition
   itself is untouched.  */
2309 case COND_EXPR:
2310 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2311 invert_truthvalue (TREE_OPERAND (arg, 1)),
2312 invert_truthvalue (TREE_OPERAND (arg, 2)));
2314 case COMPOUND_EXPR:
2315 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2316 invert_truthvalue (TREE_OPERAND (arg, 1)));
2318 case WITH_RECORD_EXPR:
2319 return build (WITH_RECORD_EXPR, type,
2320 invert_truthvalue (TREE_OPERAND (arg, 0)),
2321 TREE_OPERAND (arg, 1));
2323 case NON_LVALUE_EXPR:
2324 return invert_truthvalue (TREE_OPERAND (arg, 0));
2326 case NOP_EXPR:
2327 case CONVERT_EXPR:
2328 case FLOAT_EXPR:
2329 return build1 (TREE_CODE (arg), type,
2330 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (X & 1) negates to (X & 1) == 0; other masks are not truth values
   we know how to flip, so fall out to the abort check below.  */
2332 case BIT_AND_EXPR:
2333 if (!integer_onep (TREE_OPERAND (arg, 1)))
2334 break;
2335 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2337 case SAVE_EXPR:
2338 return build1 (TRUTH_NOT_EXPR, type, arg);
2340 case CLEANUP_POINT_EXPR:
2341 return build1 (CLEANUP_POINT_EXPR, type,
2342 invert_truthvalue (TREE_OPERAND (arg, 0)));
2344 default:
2345 break;
/* Anything unhandled must already be a boolean, else the caller fed us
   a non-truth value.  */
2347 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2348 abort ();
2349 return build1 (TRUTH_NOT_EXPR, type, arg);
2352 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2353 operands are another bit-wise operation with a common input. If so,
2354 distribute the bit operations to save an operation and possibly two if
2355 constants are involved. For example, convert
2356 (A | B) & (A | C) into A | (B & C)
2357 Further simplification will occur if B and C are constants.
2359 If this optimization cannot be done, 0 will be returned. */
2361 static tree
2362 distribute_bit_expr (code, type, arg0, arg1)
2363 enum tree_code code;
2364 tree type;
2365 tree arg0, arg1;
2367 tree common;
2368 tree left, right;
2370 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2371 || TREE_CODE (arg0) == code
2372 || (TREE_CODE (arg0) != BIT_AND_EXPR
2373 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2374 return 0;
2376 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2378 common = TREE_OPERAND (arg0, 0);
2379 left = TREE_OPERAND (arg0, 1);
2380 right = TREE_OPERAND (arg1, 1);
2382 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2384 common = TREE_OPERAND (arg0, 0);
2385 left = TREE_OPERAND (arg0, 1);
2386 right = TREE_OPERAND (arg1, 0);
2388 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2390 common = TREE_OPERAND (arg0, 1);
2391 left = TREE_OPERAND (arg0, 0);
2392 right = TREE_OPERAND (arg1, 1);
2394 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2396 common = TREE_OPERAND (arg0, 1);
2397 left = TREE_OPERAND (arg0, 0);
2398 right = TREE_OPERAND (arg1, 0);
2400 else
2401 return 0;
2403 return fold (build (TREE_CODE (arg0), type, common,
2404 fold (build (code, type, left, right))));
2407 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2408 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2410 static tree
2411 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2412 tree inner;
2413 tree type;
2414 int bitsize, bitpos;
2415 int unsignedp;
2417 tree result = build (BIT_FIELD_REF, type, inner,
2418 size_int (bitsize), bitsize_int (bitpos));
2420 TREE_UNSIGNED (result) = unsignedp;
2422 return result;
2425 /* Optimize a bit-field compare.
2427 There are two cases: First is a compare against a constant and the
2428 second is a comparison of two items where the fields are at the same
2429 bit position relative to the start of a chunk (byte, halfword, word)
2430 large enough to contain it. In these cases we can avoid the shift
2431 implicit in bitfield extractions.
2433 For constants, we emit a compare of the shifted constant with the
2434 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2435 compared. For two fields at the same position, we do the ANDs with the
2436 similar mask and compare the result of the ANDs.
2438 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2439 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2440 are the left and right operands of the comparison, respectively.
2442 If the optimization described above can be done, we return the resulting
2443 tree. Otherwise we return zero. */
/* optimize_bit_field_compare: see the block comment above.  CODE is
   NE_EXPR or EQ_EXPR; returns the rewritten comparison or 0 if the
   optimization cannot be done.  (Listing recovered from a numbered
   blob dump - brace-only lines are absent; code tokens untouched.)  */
2445 static tree
2446 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2447 enum tree_code code;
2448 tree compare_type;
2449 tree lhs, rhs;
2451 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2452 tree type = TREE_TYPE (lhs);
2453 tree signed_type, unsigned_type;
2454 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2455 enum machine_mode lmode, rmode, nmode;
2456 int lunsignedp, runsignedp;
2457 int lvolatilep = 0, rvolatilep = 0;
2458 tree linner, rinner = NULL_TREE;
2459 tree mask;
2460 tree offset;
2462 /* Get all the information about the extractions being done. If the bit size
2463 is the same as the size of the underlying object, we aren't doing an
2464 extraction at all and so can do nothing. We also don't want to
2465 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2466 then will no longer be able to replace it. */
2467 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2468 &lunsignedp, &lvolatilep);
2469 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2470 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2471 return 0;
2473 if (!const_p)
2475 /* If this is not a constant, we can only do something if bit positions,
2476 sizes, and signedness are the same. */
2477 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2478 &runsignedp, &rvolatilep);
2480 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2481 || lunsignedp != runsignedp || offset != 0
2482 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2483 return 0;
2486 /* See if we can find a mode to refer to this field. We should be able to,
2487 but fail if we can't. */
2488 nmode = get_best_mode (lbitsize, lbitpos,
2489 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2490 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2491 TYPE_ALIGN (TREE_TYPE (rinner))),
2492 word_mode, lvolatilep || rvolatilep);
2493 if (nmode == VOIDmode)
2494 return 0;
2496 /* Set signed and unsigned types of the precision of this mode for the
2497 shifts below. */
2498 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2499 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2501 /* Compute the bit position and size for the new reference and our offset
2502 within it. If the new reference is the same size as the original, we
2503 won't optimize anything, so return zero. */
2504 nbitsize = GET_MODE_BITSIZE (nmode);
2505 nbitpos = lbitpos & ~ (nbitsize - 1);
2506 lbitpos -= nbitpos;
2507 if (nbitsize == lbitsize)
2508 return 0;
2510 if (BYTES_BIG_ENDIAN)
2511 lbitpos = nbitsize - lbitsize - lbitpos;
/* All-ones constant, truncated to the field width and shifted into
   position: this selects exactly the field's bits in the new unit.  */
2513 /* Make the mask to be used against the extracted field. */
2514 mask = build_int_2 (~0, ~0);
2515 TREE_TYPE (mask) = unsigned_type;
2516 force_fit_type (mask, 0);
2517 mask = convert (unsigned_type, mask);
2518 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2519 mask = const_binop (RSHIFT_EXPR, mask,
2520 size_int (nbitsize - lbitsize - lbitpos), 0);
2522 if (! const_p)
2523 /* If not comparing with constant, just rework the comparison
2524 and return. */
2525 return build (code, compare_type,
2526 build (BIT_AND_EXPR, unsigned_type,
2527 make_bit_field_ref (linner, unsigned_type,
2528 nbitsize, nbitpos, 1),
2529 mask),
2530 build (BIT_AND_EXPR, unsigned_type,
2531 make_bit_field_ref (rinner, unsigned_type,
2532 nbitsize, nbitpos, 1),
2533 mask));
2535 /* Otherwise, we are handling the constant case. See if the constant is too
2536 big for the field. Warn and return a tree for 0 (false) if so. We do
2537 this not only for its own sake, but to avoid having to test for this
2538 error case below. If we didn't, we might generate wrong code.
2540 For unsigned fields, the constant shifted right by the field length should
2541 be all zero. For signed fields, the high-order bits should agree with
2542 the sign bit. */
2544 if (lunsignedp)
2546 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2547 convert (unsigned_type, rhs),
2548 size_int (lbitsize), 0)))
2550 warning ("comparison is always %d due to width of bit-field",
2551 code == NE_EXPR);
2552 return convert (compare_type,
2553 (code == NE_EXPR
2554 ? integer_one_node : integer_zero_node));
2557 else
2559 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2560 size_int (lbitsize - 1), 0);
2561 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2563 warning ("comparison is always %d due to width of bit-field",
2564 code == NE_EXPR);
2565 return convert (compare_type,
2566 (code == NE_EXPR
2567 ? integer_one_node : integer_zero_node));
2571 /* Single-bit compares should always be against zero. */
2572 if (lbitsize == 1 && ! integer_zerop (rhs))
2574 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2575 rhs = convert (type, integer_zero_node);
2578 /* Make a new bitfield reference, shift the constant over the
2579 appropriate number of bits and mask it with the computed mask
2580 (in case this was a signed field). If we changed it, make a new one. */
2581 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2582 if (lvolatilep)
2584 TREE_SIDE_EFFECTS (lhs) = 1;
2585 TREE_THIS_VOLATILE (lhs) = 1;
2588 rhs = fold (const_binop (BIT_AND_EXPR,
2589 const_binop (LSHIFT_EXPR,
2590 convert (unsigned_type, rhs),
2591 size_int (lbitpos), 0),
2592 mask, 0));
2594 return build (code, compare_type,
2595 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2596 rhs);
2599 /* Subroutine for fold_truthop: decode a field reference.
2601 If EXP is a comparison reference, we return the innermost reference.
2603 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2604 set to the starting bit number.
2606 If the innermost field can be completely contained in a mode-sized
2607 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2609 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2610 otherwise it is not changed.
2612 *PUNSIGNEDP is set to the signedness of the field.
2614 *PMASK is set to the mask used. This is either contained in a
2615 BIT_AND_EXPR or derived from the width of the field.
2617 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2619 Return 0 if this is not a component reference or is one that we can't
2620 do anything with. */
/* decode_field_reference: see the block comment above for the meaning
   of the out-parameters.  Returns the innermost reference, or 0 when
   EXP is not a usable integral field reference.  (Listing recovered
   from a numbered blob dump - brace-only lines are absent.)  */
2622 static tree
2623 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2624 pvolatilep, pmask, pand_mask)
2625 tree exp;
2626 HOST_WIDE_INT *pbitsize, *pbitpos;
2627 enum machine_mode *pmode;
2628 int *punsignedp, *pvolatilep;
2629 tree *pmask;
2630 tree *pand_mask;
2632 tree and_mask = 0;
2633 tree mask, inner, offset;
2634 tree unsigned_type;
2635 unsigned int precision;
2637 /* All the optimizations using this function assume integer fields.
2638 There are problems with FP fields since the type_for_size call
2639 below can fail for, e.g., XFmode. */
2640 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2641 return 0;
2643 STRIP_NOPS (exp);
/* Peel an explicit BIT_AND_EXPR mask off the reference; only a
   constant mask is usable.  */
2645 if (TREE_CODE (exp) == BIT_AND_EXPR)
2647 and_mask = TREE_OPERAND (exp, 1);
2648 exp = TREE_OPERAND (exp, 0);
2649 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2650 if (TREE_CODE (and_mask) != INTEGER_CST)
2651 return 0;
2654 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2655 punsignedp, pvolatilep);
/* INNER == EXP means no extraction happened; that is only acceptable
   when a BIT_AND mask was stripped above.  */
2656 if ((inner == exp && and_mask == 0)
2657 || *pbitsize < 0 || offset != 0
2658 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2659 return 0;
2661 /* Compute the mask to access the bitfield. */
2662 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2663 precision = TYPE_PRECISION (unsigned_type);
/* All-ones truncated to *PBITSIZE low bits.  */
2665 mask = build_int_2 (~0, ~0);
2666 TREE_TYPE (mask) = unsigned_type;
2667 force_fit_type (mask, 0);
2668 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2669 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2671 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2672 if (and_mask != 0)
2673 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2674 convert (unsigned_type, and_mask), mask));
2676 *pmask = mask;
2677 *pand_mask = and_mask;
2678 return inner;
2681 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2682 bit positions. */
2684 static int
2685 all_ones_mask_p (mask, size)
2686 tree mask;
2687 int size;
2689 tree type = TREE_TYPE (mask);
2690 unsigned int precision = TYPE_PRECISION (type);
2691 tree tmask;
2693 tmask = build_int_2 (~0, ~0);
2694 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2695 force_fit_type (tmask, 0);
2696 return
2697 tree_int_cst_equal (mask,
2698 const_binop (RSHIFT_EXPR,
2699 const_binop (LSHIFT_EXPR, tmask,
2700 size_int (precision - size),
2702 size_int (precision - size), 0));
2705 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2706 represents the sign bit of EXP's type. If EXP represents a sign
2707 or zero extension, also test VAL against the unextended type.
2708 The return value is the (sub)expression whose sign bit is VAL,
2709 or NULL_TREE otherwise. */
2711 static tree
2712 sign_bit_p (exp, val)
2713 tree exp;
2714 tree val;
2716 unsigned HOST_WIDE_INT lo;
2717 HOST_WIDE_INT hi;
2718 int width;
2719 tree t;
2721 /* Tree EXP must have an integral type. */
2722 t = TREE_TYPE (exp);
2723 if (! INTEGRAL_TYPE_P (t))
2724 return NULL_TREE;
2726 /* Tree VAL must be an integer constant. */
2727 if (TREE_CODE (val) != INTEGER_CST
2728 || TREE_CONSTANT_OVERFLOW (val))
2729 return NULL_TREE;
2731 width = TYPE_PRECISION (t);
2732 if (width > HOST_BITS_PER_WIDE_INT)
2734 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2735 lo = 0;
2737 else
2739 hi = 0;
2740 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2743 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2744 return exp;
2746 /* Handle extension from a narrower type. */
2747 if (TREE_CODE (exp) == NOP_EXPR
2748 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2749 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2751 return NULL_TREE;
2754 /* Subroutine for fold_truthop: determine if an operand is simple enough
2755 to be evaluated unconditionally. */
2757 static int
2758 simple_operand_p (exp)
2759 tree exp;
2761 /* Strip any conversions that don't change the machine mode. */
2762 while ((TREE_CODE (exp) == NOP_EXPR
2763 || TREE_CODE (exp) == CONVERT_EXPR)
2764 && (TYPE_MODE (TREE_TYPE (exp))
2765 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2766 exp = TREE_OPERAND (exp, 0);
2768 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2769 || (DECL_P (exp)
2770 && ! TREE_ADDRESSABLE (exp)
2771 && ! TREE_THIS_VOLATILE (exp)
2772 && ! DECL_NONLOCAL (exp)
2773 /* Don't regard global variables as simple. They may be
2774 allocated in ways unknown to the compiler (shared memory,
2775 #pragma weak, etc). */
2776 && ! TREE_PUBLIC (exp)
2777 && ! DECL_EXTERNAL (exp)
2778 /* Loading a static variable is unduly expensive, but global
2779 registers aren't expensive. */
2780 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2783 /* The following functions are subroutines to fold_range_test and allow it to
2784 try to change a logical combination of comparisons into a range test.
2786 For example, both
2787 X == 2 || X == 3 || X == 4 || X == 5
and
2789 X >= 2 && X <= 5
2790 are converted to
2791 (unsigned) (X - 2) <= 3
2793 We describe each set of comparisons as being either inside or outside
2794 a range, using a variable named like IN_P, and then describe the
2795 range with a lower and upper bound. If one of the bounds is omitted,
2796 it represents either the highest or lowest value of the type.
2798 In the comments below, we represent a range by two numbers in brackets
2799 preceded by a "+" to designate being inside that range, or a "-" to
2800 designate being outside that range, so the condition can be inverted by
2801 flipping the prefix. An omitted bound is represented by a "-". For
2802 example, "- [-, 10]" means being outside the range starting at the lowest
2803 possible value and ending at 10, in other words, being greater than 10.
2804 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2805 always false.
2807 We set up things so that the missing bounds are handled in a consistent
2808 manner so neither a missing bound nor "true" and "false" need to be
2809 handled using a special case. */
2811 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2812 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2813 and UPPER1_P are nonzero if the respective argument is an upper bound
2814 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2815 must be specified for a comparison. ARG1 will be converted to ARG0's
2816 type if both are specified. */
2818 static tree
2819 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2820 enum tree_code code;
2821 tree type;
2822 tree arg0, arg1;
2823 int upper0_p, upper1_p;
2825 tree tem;
2826 int result;
2827 int sgn0, sgn1;
2829 /* If neither arg represents infinity, do the normal operation.
2830 Else, if not a comparison, return infinity. Else handle the special
2831 comparison rules. Note that most of the cases below won't occur, but
2832 are handled for consistency. */
2834 if (arg0 != 0 && arg1 != 0)
2836 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2837 arg0, convert (TREE_TYPE (arg0), arg1)));
2838 STRIP_NOPS (tem);
2839 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2842 if (TREE_CODE_CLASS (code) != '<')
2843 return 0;
2845 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2846 for neither. In real maths, we cannot assume open ended ranges are
2847 the same. But, this is computer arithmetic, where numbers are finite.
2848 We can therefore make the transformation of any unbounded range with
2849 the value Z, Z being greater than any representable number. This permits
2850 us to treat unbounded ranges as equal. */
2851 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2852 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2853 switch (code)
2855 case EQ_EXPR:
2856 result = sgn0 == sgn1;
2857 break;
2858 case NE_EXPR:
2859 result = sgn0 != sgn1;
2860 break;
2861 case LT_EXPR:
2862 result = sgn0 < sgn1;
2863 break;
2864 case LE_EXPR:
2865 result = sgn0 <= sgn1;
2866 break;
2867 case GT_EXPR:
2868 result = sgn0 > sgn1;
2869 break;
2870 case GE_EXPR:
2871 result = sgn0 >= sgn1;
2872 break;
2873 default:
2874 abort ();
2877 return convert (type, result ? integer_one_node : integer_zero_node);
2880 /* Given EXP, a logical expression, set the range it is testing into
2881 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2882 actually being tested. *PLOW and *PHIGH will be made of the same type
2883 as the returned expression. If EXP is not a comparison, we will most
2884 likely not be returning a useful value and range. */
/* make_range: walk down EXP refining an (IN_P, LOW, HIGH) range test;
   see the block comment above.  Logic is an iterate-until-fixed-point
   loop over EXP's outermost code; too order-sensitive to restructure
   from this brace-stripped listing, so code tokens are untouched and
   only comments are added.  */
2886 static tree
2887 make_range (exp, pin_p, plow, phigh)
2888 tree exp;
2889 int *pin_p;
2890 tree *plow, *phigh;
2892 enum tree_code code;
2893 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2894 tree orig_type = NULL_TREE;
2895 int in_p, n_in_p;
2896 tree low, high, n_low, n_high;
2898 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2899 and see if we can refine the range. Some of the cases below may not
2900 happen, but it doesn't seem worth worrying about this. We "continue"
2901 the outer loop when we've changed something; otherwise we "break"
2902 the switch, which will "break" the while. */
2904 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2906 while (1)
2908 code = TREE_CODE (exp);
2910 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2912 arg0 = TREE_OPERAND (exp, 0);
2913 if (TREE_CODE_CLASS (code) == '<'
2914 || TREE_CODE_CLASS (code) == '1'
2915 || TREE_CODE_CLASS (code) == '2')
2916 type = TREE_TYPE (arg0);
2917 if (TREE_CODE_CLASS (code) == '2'
2918 || TREE_CODE_CLASS (code) == '<'
2919 || (TREE_CODE_CLASS (code) == 'e'
2920 && TREE_CODE_LENGTH (code) > 1))
2921 arg1 = TREE_OPERAND (exp, 1);
2924 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2925 lose a cast by accident. */
2926 if (type != NULL_TREE && orig_type == NULL_TREE)
2927 orig_type = type;
2929 switch (code)
/* Logical NOT just flips the in/out sense.  */
2931 case TRUTH_NOT_EXPR:
2932 in_p = ! in_p, exp = arg0;
2933 continue;
2935 case EQ_EXPR: case NE_EXPR:
2936 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2937 /* We can only do something if the range is testing for zero
2938 and if the second operand is an integer constant. Note that
2939 saying something is "in" the range we make is done by
2940 complementing IN_P since it will set in the initial case of
2941 being not equal to zero; "out" is leaving it alone. */
2942 if (low == 0 || high == 0
2943 || ! integer_zerop (low) || ! integer_zerop (high)
2944 || TREE_CODE (arg1) != INTEGER_CST)
2945 break;
2947 switch (code)
2949 case NE_EXPR: /* - [c, c] */
2950 low = high = arg1;
2951 break;
2952 case EQ_EXPR: /* + [c, c] */
2953 in_p = ! in_p, low = high = arg1;
2954 break;
2955 case GT_EXPR: /* - [-, c] */
2956 low = 0, high = arg1;
2957 break;
2958 case GE_EXPR: /* + [c, -] */
2959 in_p = ! in_p, low = arg1, high = 0;
2960 break;
2961 case LT_EXPR: /* - [c, -] */
2962 low = arg1, high = 0;
2963 break;
2964 case LE_EXPR: /* + [-, c] */
2965 in_p = ! in_p, low = 0, high = arg1;
2966 break;
2967 default:
2968 abort ();
2971 exp = arg0;
2973 /* If this is an unsigned comparison, we also know that EXP is
2974 greater than or equal to zero. We base the range tests we make
2975 on that fact, so we record it here so we can parse existing
2976 range tests. */
2977 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2979 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2980 1, convert (type, integer_zero_node),
2981 NULL_TREE))
2982 break;
2984 in_p = n_in_p, low = n_low, high = n_high;
2986 /* If the high bound is missing, but we
2987 have a low bound, reverse the range so
2988 it goes from zero to the low bound minus 1. */
2989 if (high == 0 && low)
2991 in_p = ! in_p;
2992 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2993 integer_one_node, 0);
2994 low = convert (type, integer_zero_node);
2997 continue;
2999 case NEGATE_EXPR:
3000 /* (-x) IN [a,b] -> x in [-b, -a] */
3001 n_low = range_binop (MINUS_EXPR, type,
3002 convert (type, integer_zero_node), 0, high, 1);
3003 n_high = range_binop (MINUS_EXPR, type,
3004 convert (type, integer_zero_node), 0, low, 0);
3005 low = n_low, high = n_high;
3006 exp = arg0;
3007 continue;
3009 case BIT_NOT_EXPR:
3010 /* ~ X -> -X - 1 */
3011 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3012 convert (type, integer_one_node));
3013 continue;
/* Shift the bounds by the constant instead of shifting EXP.  */
3015 case PLUS_EXPR: case MINUS_EXPR:
3016 if (TREE_CODE (arg1) != INTEGER_CST)
3017 break;
3019 /* If EXP is signed, any overflow in the computation is undefined,
3020 so we don't worry about it so long as our computations on
3021 the bounds don't overflow. For unsigned, overflow is defined
3022 and this is exactly the right thing. */
3023 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3024 type, low, 0, arg1, 0);
3025 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3026 type, high, 1, arg1, 0);
3027 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3028 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3029 break;
3031 /* Check for an unsigned range which has wrapped around the maximum
3032 value thus making n_high < n_low, and normalize it. */
3033 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3035 low = range_binop (PLUS_EXPR, type, n_high, 0,
3036 integer_one_node, 0);
3037 high = range_binop (MINUS_EXPR, type, n_low, 0,
3038 integer_one_node, 0);
3040 /* If the range is of the form +/- [ x+1, x ], we won't
3041 be able to normalize it. But then, it represents the
3042 whole range or the empty set, so make it
3043 +/- [ -, - ]. */
3044 if (tree_int_cst_equal (n_low, low)
3045 && tree_int_cst_equal (n_high, high))
3046 low = high = 0;
3047 else
3048 in_p = ! in_p;
3050 else
3051 low = n_low, high = n_high;
3053 exp = arg0;
3054 continue;
/* Look through conversions when the bounds still fit the inner
   type; never widen through a cast (that would lose it).  */
3056 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3057 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3058 break;
3060 if (! INTEGRAL_TYPE_P (type)
3061 || (low != 0 && ! int_fits_type_p (low, type))
3062 || (high != 0 && ! int_fits_type_p (high, type)))
3063 break;
3065 n_low = low, n_high = high;
3067 if (n_low != 0)
3068 n_low = convert (type, n_low);
3070 if (n_high != 0)
3071 n_high = convert (type, n_high);
3073 /* If we're converting from an unsigned to a signed type,
3074 we will be doing the comparison as unsigned. The tests above
3075 have already verified that LOW and HIGH are both positive.
3077 So we have to make sure that the original unsigned value will
3078 be interpreted as positive. */
3079 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3081 tree equiv_type = (*lang_hooks.types.type_for_mode)
3082 (TYPE_MODE (type), 1);
3083 tree high_positive;
3085 /* A range without an upper bound is, naturally, unbounded.
3086 Since convert would have cropped a very large value, use
3087 the max value for the destination type. */
3088 high_positive
3089 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3090 : TYPE_MAX_VALUE (type);
3092 high_positive = fold (build (RSHIFT_EXPR, type,
3093 convert (type, high_positive),
3094 convert (type, integer_one_node)));
3096 /* If the low bound is specified, "and" the range with the
3097 range for which the original unsigned value will be
3098 positive. */
3099 if (low != 0)
3101 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3102 1, n_low, n_high,
3103 1, convert (type, integer_zero_node),
3104 high_positive))
3105 break;
3107 in_p = (n_in_p == in_p);
3109 else
3111 /* Otherwise, "or" the range with the range of the input
3112 that will be interpreted as negative. */
3113 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3114 0, n_low, n_high,
3115 1, convert (type, integer_zero_node),
3116 high_positive))
3117 break;
3119 in_p = (in_p != n_in_p);
3123 exp = arg0;
3124 low = n_low, high = n_high;
3125 continue;
3127 default:
3128 break;
3131 break;
3134 /* If EXP is a constant, we can evaluate whether this is true or false. */
3135 if (TREE_CODE (exp) == INTEGER_CST)
3137 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3138 exp, 0, low, 0))
3139 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3140 exp, 1, high, 1)));
3141 low = high = 0;
3142 exp = 0;
3145 *pin_p = in_p, *plow = low, *phigh = high;
3146 return exp;
3149 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3150 type, TYPE, return an expression to test if EXP is in (or out of, depending
3151 on IN_P) the range. */
/* build_range_check: build an expression of type TYPE testing whether
   EXP is inside (IN_P non-zero) or outside [LOW, HIGH]; a zero bound
   means unbounded on that side.  Returns 0 when no test can be built.
   (Listing recovered from a numbered blob dump - brace-only lines are
   absent; code tokens untouched.)  */
3153 static tree
3154 build_range_check (type, exp, in_p, low, high)
3155 tree type;
3156 tree exp;
3157 int in_p;
3158 tree low, high;
3160 tree etype = TREE_TYPE (exp);
3161 tree value;
/* An "outside" test is the negation of the corresponding "inside"
   test, when one can be built.  */
3163 if (! in_p
3164 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3165 return invert_truthvalue (value);
/* No bounds at all: trivially true.  */
3167 if (low == 0 && high == 0)
3168 return convert (type, integer_one_node);
3170 if (low == 0)
3171 return fold (build (LE_EXPR, type, exp, high));
3173 if (high == 0)
3174 return fold (build (GE_EXPR, type, exp, low));
3176 if (operand_equal_p (low, high, 0))
3177 return fold (build (EQ_EXPR, type, exp, low));
/* [0, high]: do the comparison unsigned so negative values fall out
   as "large" and a single compare suffices.  */
3179 if (integer_zerop (low))
3181 if (! TREE_UNSIGNED (etype))
3183 etype = (*lang_hooks.types.unsigned_type) (etype);
3184 high = convert (etype, high);
3185 exp = convert (etype, exp);
3187 return build_range_check (type, exp, 1, 0, high);
3190 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3191 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3193 unsigned HOST_WIDE_INT lo;
3194 HOST_WIDE_INT hi;
3195 int prec;
/* Build the two-word representation of signed-max for ETYPE and
   check HIGH against it.  */
3197 prec = TYPE_PRECISION (etype);
3198 if (prec <= HOST_BITS_PER_WIDE_INT)
3200 hi = 0;
3201 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3203 else
3205 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3206 lo = (unsigned HOST_WIDE_INT) -1;
3209 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3211 if (TREE_UNSIGNED (etype))
3213 etype = (*lang_hooks.types.signed_type) (etype);
3214 exp = convert (etype, exp);
3216 return fold (build (GT_EXPR, type, exp,
3217 convert (etype, integer_zero_node)));
/* General case: reduce [low, high] to [0, high-low] by subtracting
   LOW, provided the width constant does not overflow.  */
3221 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3222 && ! TREE_OVERFLOW (value))
3223 return build_range_check (type,
3224 fold (build (MINUS_EXPR, etype, exp, low)),
3225 1, convert (etype, integer_zero_node), value);
3227 return 0;
3230 /* Given two ranges, see if we can merge them into one. Return 1 if we
3231 can, 0 if we can't. Set the output range into the specified parameters. */
3233 static int
3234 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3235 int *pin_p;
3236 tree *plow, *phigh;
3237 int in0_p, in1_p;
3238 tree low0, high0, low1, high1;
/* Outputs: *PIN_P, *PLOW, *PHIGH describe the merged range.
   Inputs: (IN0_P, LOW0, HIGH0) and (IN1_P, LOW1, HIGH1) are the two
   ranges; INx_P nonzero means "inside [LOWx, HIGHx]", zero means
   "outside".  A zero bound means that end is unbounded.  */
3240 int no_overlap;
3241 int subset;
3242 int temp;
3243 tree tem;
3244 int in_p;
3245 tree low, high;
/* lowequal/highequal: whether the two ranges share a lower/upper
   bound (both unbounded also counts as equal).  */
3246 int lowequal = ((low0 == 0 && low1 == 0)
3247 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3248 low0, 0, low1, 0)));
3249 int highequal = ((high0 == 0 && high1 == 0)
3250 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3251 high0, 1, high1, 1)));
3253 /* Make range 0 be the range that starts first, or ends last if they
3254 start at the same value. Swap them if it isn't. */
3255 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3256 low0, 0, low1, 0))
3257 || (lowequal
3258 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3259 high1, 1, high0, 1))))
3261 temp = in0_p, in0_p = in1_p, in1_p = temp;
3262 tem = low0, low0 = low1, low1 = tem;
3263 tem = high0, high0 = high1, high1 = tem;
3266 /* Now flag two cases, whether the ranges are disjoint or whether the
3267 second range is totally subsumed in the first. Note that the tests
3268 below are simplified by the ones above. */
3269 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3270 high0, 1, low1, 0));
3271 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3272 high1, 1, high0, 1));
3274 /* We now have four cases, depending on whether we are including or
3275 excluding the two ranges. */
3276 if (in0_p && in1_p)
3278 /* If they don't overlap, the result is false. If the second range
3279 is a subset it is the result. Otherwise, the range is from the start
3280 of the second to the end of the first. */
3281 if (no_overlap)
3282 in_p = 0, low = high = 0;
3283 else if (subset)
3284 in_p = 1, low = low1, high = high1;
3285 else
3286 in_p = 1, low = low1, high = high0;
3289 else if (in0_p && ! in1_p)
3291 /* If they don't overlap, the result is the first range. If they are
3292 equal, the result is false. If the second range is a subset of the
3293 first, and the ranges begin at the same place, we go from just after
3294 the end of the first range to the end of the second. If the second
3295 range is not a subset of the first, or if it is a subset and both
3296 ranges end at the same place, the range starts at the start of the
3297 first range and ends just before the second range.
3298 Otherwise, we can't describe this as a single range. */
3299 if (no_overlap)
3300 in_p = 1, low = low0, high = high0;
3301 else if (lowequal && highequal)
3302 in_p = 0, low = high = 0;
3303 else if (subset && lowequal)
3305 in_p = 1, high = high0;
3306 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3307 integer_one_node, 0);
3309 else if (! subset || highequal)
3311 in_p = 1, low = low0;
3312 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3313 integer_one_node, 0);
3315 else
3316 return 0;
3319 else if (! in0_p && in1_p)
3321 /* If they don't overlap, the result is the second range. If the second
3322 is a subset of the first, the result is false. Otherwise,
3323 the range starts just after the first range and ends at the
3324 end of the second. */
3325 if (no_overlap)
3326 in_p = 1, low = low1, high = high1;
3327 else if (subset || highequal)
3328 in_p = 0, low = high = 0;
3329 else
3331 in_p = 1, high = high1;
3332 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3333 integer_one_node, 0);
3337 else
3339 /* The case where we are excluding both ranges. Here the complex case
3340 is if they don't overlap. In that case, the only time we have a
3341 range is if they are adjacent. If the second is a subset of the
3342 first, the result is the first. Otherwise, the range to exclude
3343 starts at the beginning of the first range and ends at the end of the
3344 second. */
3345 if (no_overlap)
/* Adjacency test: HIGH0 + 1 == LOW1.  */
3347 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3348 range_binop (PLUS_EXPR, NULL_TREE,
3349 high0, 1,
3350 integer_one_node, 1),
3351 1, low1, 0)))
3352 in_p = 0, low = low0, high = high1;
3353 else
3354 return 0;
3356 else if (subset)
3357 in_p = 0, low = low0, high = high0;
3358 else
3359 in_p = 0, low = low0, high = high1;
3362 *pin_p = in_p, *plow = low, *phigh = high;
3363 return 1;
3366 /* EXP is some logical combination of boolean tests. See if we can
3367 merge it into some range test. Return the new tree if so. */
3369 static tree
3370 fold_range_test (exp)
3371 tree exp;
/* EXP is a TRUTH_ANDIF/AND/ORIF/OR_EXPR combining boolean tests.
   Returns a merged range test, a non-short-circuit rewrite, or 0 if
   no simplification applies.  */
3373 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3374 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3375 int in0_p, in1_p, in_p;
3376 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into an (in_p, low, high) range over a
   common expression; lhs/rhs are 0 if no range could be made.  */
3377 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3378 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3379 tree tem;
3381 /* If this is an OR operation, invert both sides; we will invert
3382 again at the end. */
3383 if (or_op)
3384 in0_p = ! in0_p, in1_p = ! in1_p;
3386 /* If both expressions are the same, if we can merge the ranges, and we
3387 can build the range test, return it or it inverted. If one of the
3388 ranges is always true or always false, consider it to be the same
3389 expression as the other. */
3390 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3391 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3392 in1_p, low1, high1)
3393 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3394 lhs != 0 ? lhs
3395 : rhs != 0 ? rhs : integer_zero_node,
3396 in_p, low, high))))
3397 return or_op ? invert_truthvalue (tem) : tem;
3399 /* On machines where the branch cost is expensive, if this is a
3400 short-circuited branch and the underlying object on both sides
3401 is the same, make a non-short-circuit operation. */
3402 else if (BRANCH_COST >= 2
3403 && lhs != 0 && rhs != 0
3404 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3405 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3406 && operand_equal_p (lhs, rhs, 0))
3408 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3409 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3410 which cases we can't do this. */
3411 if (simple_operand_p (lhs))
3412 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3413 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3414 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3415 TREE_OPERAND (exp, 1));
3417 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3418 && ! contains_placeholder_p (lhs))
/* Wrap the shared subexpression in a SAVE_EXPR so it is evaluated
   once, then rebuild both range checks against that common copy.  */
3420 tree common = save_expr (lhs);
3422 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3423 or_op ? ! in0_p : in0_p,
3424 low0, high0))
3425 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3426 or_op ? ! in1_p : in1_p,
3427 low1, high1))))
3428 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3429 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3430 TREE_TYPE (exp), lhs, rhs);
3434 return 0;
3437 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3438 bit value. Arrange things so the extra bits will be set to zero if and
3439 only if C is sign-extended to its full width. If MASK is nonzero,
3440 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3442 static tree
3443 unextend (c, p, unsignedp, mask)
3444 tree c;
3445 int p;
3446 int unsignedp;
3447 tree mask;
/* C is the INTEGER_CST to adjust, P its bit-field width, UNSIGNEDP
   nonzero if the field is unsigned, MASK an optional INTEGER_CST
   AND'ed into the extension bits (0 if none).  */
3449 tree type = TREE_TYPE (c);
3450 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3451 tree temp;
/* Nothing to do when the field fills its mode or is unsigned.  */
3453 if (p == modesize || unsignedp)
3454 return c;
3456 /* We work by getting just the sign bit into the low-order bit, then
3457 into the high-order bit, then sign-extend. We then XOR that value
3458 with C. */
3459 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3460 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3462 /* We must use a signed type in order to get an arithmetic right shift.
3463 However, we must also avoid introducing accidental overflows, so that
3464 a subsequent call to integer_zerop will work. Hence we must
3465 do the type conversion here. At this point, the constant is either
3466 zero or one, and the conversion to a signed type can never overflow.
3467 We could get an overflow if this conversion is done anywhere else. */
3468 if (TREE_UNSIGNED (type))
3469 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
/* Move the sign bit to the top, then arithmetic-shift it back down to
   replicate it over all bits above position P-1.  */
3471 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3472 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3473 if (mask != 0)
3474 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3475 /* If necessary, convert the type back to match the type of C. */
3476 if (TREE_UNSIGNED (type))
3477 temp = convert (type, temp);
3479 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3482 /* Find ways of folding logical expressions of LHS and RHS:
3483 Try to merge two comparisons to the same innermost item.
3484 Look for range tests like "ch >= '0' && ch <= '9'".
3485 Look for combinations of simple terms on machines with expensive branches
3486 and evaluate the RHS unconditionally.
3488 For example, if we have p->a == 2 && p->b == 4 and we can make an
3489 object large enough to span both A and B, we can do this with a comparison
3490 against the object ANDed with the a mask.
3492 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3493 operations to do this with one comparison.
3495 We check for both normal comparisons and the BIT_AND_EXPRs made by
3496 this function and the one above.
3498 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3499 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3501 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3502 two operands.
3504 We return the simplified tree or 0 if no optimization is possible. */
3506 static tree
3507 fold_truthop (code, truth_type, lhs, rhs)
3508 enum tree_code code;
3509 tree truth_type, lhs, rhs;
3511 /* If this is the "or" of two comparisons, we can do something if
3512 the comparisons are NE_EXPR. If this is the "and", we can do something
3513 if the comparisons are EQ_EXPR. I.e.,
3514 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3516 WANTED_CODE is this operation code. For single bit fields, we can
3517 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3518 comparison for one-bit fields. */
3520 enum tree_code wanted_code;
3521 enum tree_code lcode, rcode;
/* Naming convention below: the first letter is the comparison (l =
   LHS, r = RHS of the logical op); the second letter is the operand
   within that comparison (l = left, r = right).  So ll_* describes
   the left operand of the left comparison, etc.  */
3522 tree ll_arg, lr_arg, rl_arg, rr_arg;
3523 tree ll_inner, lr_inner, rl_inner, rr_inner;
3524 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3525 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3526 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3527 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3528 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3529 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3530 enum machine_mode lnmode, rnmode;
3531 tree ll_mask, lr_mask, rl_mask, rr_mask;
3532 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3533 tree l_const, r_const;
3534 tree lntype, rntype, result;
3535 int first_bit, end_bit;
3536 int volatilep;
3538 /* Start by getting the comparison codes. Fail if anything is volatile.
3539 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3540 it were surrounded with a NE_EXPR. */
3542 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3543 return 0;
3545 lcode = TREE_CODE (lhs);
3546 rcode = TREE_CODE (rhs);
3548 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3549 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3551 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3552 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
/* Both operands must be comparisons ('<' class) for anything below.  */
3554 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3555 return 0;
/* Collapse the short-circuit variants into their plain forms.  */
3557 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3558 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3560 ll_arg = TREE_OPERAND (lhs, 0);
3561 lr_arg = TREE_OPERAND (lhs, 1);
3562 rl_arg = TREE_OPERAND (rhs, 0);
3563 rr_arg = TREE_OPERAND (rhs, 1);
3565 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3566 if (simple_operand_p (ll_arg)
3567 && simple_operand_p (lr_arg)
3568 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3570 int compcode;
/* Same operands in the same order: combine the comparison codes
   via their compcode bit encodings.  */
3572 if (operand_equal_p (ll_arg, rl_arg, 0)
3573 && operand_equal_p (lr_arg, rr_arg, 0))
3575 int lcompcode, rcompcode;
3577 lcompcode = comparison_to_compcode (lcode);
3578 rcompcode = comparison_to_compcode (rcode);
3579 compcode = (code == TRUTH_AND_EXPR)
3580 ? lcompcode & rcompcode
3581 : lcompcode | rcompcode;
/* Same operands but swapped: canonicalize by swapping the right
   comparison first.  */
3583 else if (operand_equal_p (ll_arg, rr_arg, 0)
3584 && operand_equal_p (lr_arg, rl_arg, 0))
3586 int lcompcode, rcompcode;
3588 rcode = swap_tree_comparison (rcode);
3589 lcompcode = comparison_to_compcode (lcode);
3590 rcompcode = comparison_to_compcode (rcode);
3591 compcode = (code == TRUTH_AND_EXPR)
3592 ? lcompcode & rcompcode
3593 : lcompcode | rcompcode;
3595 else
3596 compcode = -1;
3598 if (compcode == COMPCODE_TRUE)
3599 return convert (truth_type, integer_one_node);
3600 else if (compcode == COMPCODE_FALSE)
3601 return convert (truth_type, integer_zero_node);
3602 else if (compcode != -1)
3603 return build (compcode_to_comparison (compcode),
3604 truth_type, ll_arg, lr_arg);
3607 /* If the RHS can be evaluated unconditionally and its operands are
3608 simple, it wins to evaluate the RHS unconditionally on machines
3609 with expensive branches. In this case, this isn't a comparison
3610 that can be merged. Avoid doing this if the RHS is a floating-point
3611 comparison since those can trap. */
3613 if (BRANCH_COST >= 2
3614 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3615 && simple_operand_p (rl_arg)
3616 && simple_operand_p (rr_arg))
3618 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3619 if (code == TRUTH_OR_EXPR
3620 && lcode == NE_EXPR && integer_zerop (lr_arg)
3621 && rcode == NE_EXPR && integer_zerop (rr_arg)
3622 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3623 return build (NE_EXPR, truth_type,
3624 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3625 ll_arg, rl_arg),
3626 integer_zero_node);
3628 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3629 if (code == TRUTH_AND_EXPR
3630 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3631 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3632 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3633 return build (EQ_EXPR, truth_type,
3634 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3635 ll_arg, rl_arg),
3636 integer_zero_node);
/* Non-short-circuit form of the original operation.  */
3638 return build (code, truth_type, lhs, rhs);
3641 /* See if the comparisons can be merged. Then get all the parameters for
3642 each side. */
3644 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3645 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3646 return 0;
3648 volatilep = 0;
3649 ll_inner = decode_field_reference (ll_arg,
3650 &ll_bitsize, &ll_bitpos, &ll_mode,
3651 &ll_unsignedp, &volatilep, &ll_mask,
3652 &ll_and_mask);
3653 lr_inner = decode_field_reference (lr_arg,
3654 &lr_bitsize, &lr_bitpos, &lr_mode,
3655 &lr_unsignedp, &volatilep, &lr_mask,
3656 &lr_and_mask);
3657 rl_inner = decode_field_reference (rl_arg,
3658 &rl_bitsize, &rl_bitpos, &rl_mode,
3659 &rl_unsignedp, &volatilep, &rl_mask,
3660 &rl_and_mask);
3661 rr_inner = decode_field_reference (rr_arg,
3662 &rr_bitsize, &rr_bitpos, &rr_mode,
3663 &rr_unsignedp, &volatilep, &rr_mask,
3664 &rr_and_mask);
3666 /* It must be true that the inner operation on the lhs of each
3667 comparison must be the same if we are to be able to do anything.
3668 Then see if we have constants. If not, the same must be true for
3669 the rhs's. */
3670 if (volatilep || ll_inner == 0 || rl_inner == 0
3671 || ! operand_equal_p (ll_inner, rl_inner, 0))
3672 return 0;
3674 if (TREE_CODE (lr_arg) == INTEGER_CST
3675 && TREE_CODE (rr_arg) == INTEGER_CST)
3676 l_const = lr_arg, r_const = rr_arg;
3677 else if (lr_inner == 0 || rr_inner == 0
3678 || ! operand_equal_p (lr_inner, rr_inner, 0))
3679 return 0;
3680 else
3681 l_const = r_const = 0;
3683 /* If either comparison code is not correct for our logical operation,
3684 fail. However, we can convert a one-bit comparison against zero into
3685 the opposite comparison against that bit being set in the field. */
3687 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3688 if (lcode != wanted_code)
3690 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3692 /* Make the left operand unsigned, since we are only interested
3693 in the value of one bit. Otherwise we are doing the wrong
3694 thing below. */
3695 ll_unsignedp = 1;
3696 l_const = ll_mask;
3698 else
3699 return 0;
3702 /* This is analogous to the code for l_const above. */
3703 if (rcode != wanted_code)
3705 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3707 rl_unsignedp = 1;
3708 r_const = rl_mask;
3710 else
3711 return 0;
3714 /* See if we can find a mode that contains both fields being compared on
3715 the left. If we can't, fail. Otherwise, update all constants and masks
3716 to be relative to a field of that size. */
3717 first_bit = MIN (ll_bitpos, rl_bitpos);
3718 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3719 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3720 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3721 volatilep);
3722 if (lnmode == VOIDmode)
3723 return 0;
3725 lnbitsize = GET_MODE_BITSIZE (lnmode);
3726 lnbitpos = first_bit & ~ (lnbitsize - 1);
3727 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
/* x*_bitpos: bit positions re-expressed relative to the combined
   field's origin.  */
3728 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3730 if (BYTES_BIG_ENDIAN)
3732 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3733 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3736 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3737 size_int (xll_bitpos), 0);
3738 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3739 size_int (xrl_bitpos), 0);
3741 if (l_const)
3743 l_const = convert (lntype, l_const);
3744 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3745 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* A constant with bits set outside the field's mask can never
   match the field: the whole test folds to a constant.  */
3746 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3747 fold (build1 (BIT_NOT_EXPR,
3748 lntype, ll_mask)),
3749 0)))
3751 warning ("comparison is always %d", wanted_code == NE_EXPR);
3753 return convert (truth_type,
3754 wanted_code == NE_EXPR
3755 ? integer_one_node : integer_zero_node);
3758 if (r_const)
3760 r_const = convert (lntype, r_const);
3761 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3762 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3763 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3764 fold (build1 (BIT_NOT_EXPR,
3765 lntype, rl_mask)),
3766 0)))
3768 warning ("comparison is always %d", wanted_code == NE_EXPR);
3770 return convert (truth_type,
3771 wanted_code == NE_EXPR
3772 ? integer_one_node : integer_zero_node);
3776 /* If the right sides are not constant, do the same for it. Also,
3777 disallow this optimization if a size or signedness mismatch occurs
3778 between the left and right sides. */
3779 if (l_const == 0)
3781 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3782 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3783 /* Make sure the two fields on the right
3784 correspond to the left without being swapped. */
3785 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3786 return 0;
3788 first_bit = MIN (lr_bitpos, rr_bitpos);
3789 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3790 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3791 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3792 volatilep);
3793 if (rnmode == VOIDmode)
3794 return 0;
3796 rnbitsize = GET_MODE_BITSIZE (rnmode);
3797 rnbitpos = first_bit & ~ (rnbitsize - 1);
3798 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3799 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3801 if (BYTES_BIG_ENDIAN)
3803 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3804 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3807 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3808 size_int (xlr_bitpos), 0);
3809 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3810 size_int (xrr_bitpos), 0);
3812 /* Make a mask that corresponds to both fields being compared.
3813 Do this for both items being compared. If the operands are the
3814 same size and the bits being compared are in the same position
3815 then we can do this by masking both and comparing the masked
3816 results. */
3817 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3818 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3819 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3821 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3822 ll_unsignedp || rl_unsignedp)ll;
3823 if (! all_ones_mask_p (ll_mask, lnbitsize))
3824 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3826 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3827 lr_unsignedp || rr_unsignedp);
3828 if (! all_ones_mask_p (lr_mask, rnbitsize))
3829 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3831 return build (wanted_code, truth_type, lhs, rhs);
3834 /* There is still another way we can do something: If both pairs of
3835 fields being compared are adjacent, we may be able to make a wider
3836 field containing them both.
3838 Note that we still must mask the lhs/rhs expressions. Furthermore,
3839 the mask must be shifted to account for the shift done by
3840 make_bit_field_ref. */
3841 if ((ll_bitsize + ll_bitpos == rl_bitpos
3842 && lr_bitsize + lr_bitpos == rr_bitpos)
3843 || (ll_bitpos == rl_bitpos + rl_bitsize
3844 && lr_bitpos == rr_bitpos + rr_bitsize))
3846 tree type;
3848 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3849 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3850 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3851 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3853 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3854 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3855 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3856 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3858 /* Convert to the smaller type before masking out unwanted bits. */
3859 type = lntype;
3860 if (lntype != rntype)
3862 if (lnbitsize > rnbitsize)
3864 lhs = convert (rntype, lhs);
3865 ll_mask = convert (rntype, ll_mask);
3866 type = rntype;
3868 else if (lnbitsize < rnbitsize)
3870 rhs = convert (lntype, rhs);
3871 lr_mask = convert (lntype, lr_mask);
3872 type = lntype;
3876 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3877 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3879 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3880 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3882 return build (wanted_code, truth_type, lhs, rhs);
3885 return 0;
3888 /* Handle the case of comparisons with constants. If there is something in
3889 common between the masks, those bits of the constants must be the same.
3890 If not, the condition is always false. Test for this to avoid generating
3891 incorrect code below. */
3892 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3893 if (! integer_zerop (result)
3894 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3895 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3897 if (wanted_code == NE_EXPR)
3899 warning ("`or' of unmatched not-equal tests is always 1");
3900 return convert (truth_type, integer_one_node);
3902 else
3904 warning ("`and' of mutually exclusive equal-tests is always 0");
3905 return convert (truth_type, integer_zero_node);
3909 /* Construct the expression we will return. First get the component
3910 reference we will make. Unless the mask is all ones the width of
3911 that field, perform the mask operation. Then compare with the
3912 merged constant. */
3913 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3914 ll_unsignedp || rl_unsignedp);
3916 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3917 if (! all_ones_mask_p (ll_mask, lnbitsize))
3918 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3920 return build (wanted_code, truth_type, result,
3921 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3924 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3925 constant. */
3927 static tree
3928 optimize_minmax_comparison (t)
3929 tree t;
/* T is a comparison whose first operand is a MIN_EXPR or MAX_EXPR and
   whose second is an INTEGER_CST.  Returns the simplified comparison,
   or T unchanged if no optimization applies.  */
3931 tree type = TREE_TYPE (t);
3932 tree arg0 = TREE_OPERAND (t, 0);
3933 enum tree_code op_code;
3934 tree comp_const = TREE_OPERAND (t, 1);
3935 tree minmax_const;
3936 int consts_equal, consts_lt;
3937 tree inner;
3939 STRIP_SIGN_NOPS (arg0);
3941 op_code = TREE_CODE (arg0);
3942 minmax_const = TREE_OPERAND (arg0, 1);
/* Relation between the min/max constant and the comparison constant
   drives every case below.  */
3943 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3944 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3945 inner = TREE_OPERAND (arg0, 0);
3947 /* If something does not permit us to optimize, return the original tree. */
3948 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3949 || TREE_CODE (comp_const) != INTEGER_CST
3950 || TREE_CONSTANT_OVERFLOW (comp_const)
3951 || TREE_CODE (minmax_const) != INTEGER_CST
3952 || TREE_CONSTANT_OVERFLOW (minmax_const))
3953 return t;
3955 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3956 and GT_EXPR, doing the rest with recursive calls using logical
3957 simplifications. */
3958 switch (TREE_CODE (t))
/* NE/LT/LE are the negations of EQ/GE/GT: invert, recurse, invert.  */
3960 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3961 return
3962 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* GE is (EQ || GT).  */
3964 case GE_EXPR:
3965 return
3966 fold (build (TRUTH_ORIF_EXPR, type,
3967 optimize_minmax_comparison
3968 (build (EQ_EXPR, type, arg0, comp_const)),
3969 optimize_minmax_comparison
3970 (build (GT_EXPR, type, arg0, comp_const))));
3972 case EQ_EXPR:
3973 if (op_code == MAX_EXPR && consts_equal)
3974 /* MAX (X, 0) == 0 -> X <= 0 */
3975 return fold (build (LE_EXPR, type, inner, comp_const));
3977 else if (op_code == MAX_EXPR && consts_lt)
3978 /* MAX (X, 0) == 5 -> X == 5 */
3979 return fold (build (EQ_EXPR, type, inner, comp_const));
3981 else if (op_code == MAX_EXPR)
3982 /* MAX (X, 0) == -1 -> false */
3983 return omit_one_operand (type, integer_zero_node, inner);
3985 else if (consts_equal)
3986 /* MIN (X, 0) == 0 -> X >= 0 */
3987 return fold (build (GE_EXPR, type, inner, comp_const));
3989 else if (consts_lt)
3990 /* MIN (X, 0) == 5 -> false */
3991 return omit_one_operand (type, integer_zero_node, inner);
3993 else
3994 /* MIN (X, 0) == -1 -> X == -1 */
3995 return fold (build (EQ_EXPR, type, inner, comp_const));
3997 case GT_EXPR:
3998 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3999 /* MAX (X, 0) > 0 -> X > 0
4000 MAX (X, 0) > 5 -> X > 5 */
4001 return fold (build (GT_EXPR, type, inner, comp_const));
4003 else if (op_code == MAX_EXPR)
4004 /* MAX (X, 0) > -1 -> true */
4005 return omit_one_operand (type, integer_one_node, inner);
4007 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4008 /* MIN (X, 0) > 0 -> false
4009 MIN (X, 0) > 5 -> false */
4010 return omit_one_operand (type, integer_zero_node, inner);
4012 else
4013 /* MIN (X, 0) > -1 -> X > -1 */
4014 return fold (build (GT_EXPR, type, inner, comp_const));
4016 default:
4017 return t;
4021 /* T is an integer expression that is being multiplied, divided, or taken a
4022 modulus (CODE says which and what kind of divide or modulus) by a
4023 constant C. See if we can eliminate that operation by folding it with
4024 other operations already in T. WIDE_TYPE, if non-null, is a type that
4025 should be used for the computation if wider than our type.
4027 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4028 (X * 2) + (Y * 4). We must, however, be assured that either the original
4029 expression would not overflow or that overflow is undefined for the type
4030 in the language in question.
4032 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4033 the machine has a multiply-accumulate insn or that this is part of an
4034 addressing calculation.
4036 If we return a non-null expression, it is an equivalent form of the
4037 original computation, but need not be in the original type. */
4039 static tree
4040 extract_muldiv (t, c, code, wide_type)
4041 tree t;
4042 tree c;
4043 enum tree_code code;
4044 tree wide_type;
4046 tree type = TREE_TYPE (t);
4047 enum tree_code tcode = TREE_CODE (t);
4048 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4049 > GET_MODE_SIZE (TYPE_MODE (type)))
4050 ? wide_type : type);
4051 tree t1, t2;
4052 int same_p = tcode == code;
4053 tree op0 = NULL_TREE, op1 = NULL_TREE;
4055 /* Don't deal with constants of zero here; they confuse the code below. */
4056 if (integer_zerop (c))
4057 return NULL_TREE;
4059 if (TREE_CODE_CLASS (tcode) == '1')
4060 op0 = TREE_OPERAND (t, 0);
4062 if (TREE_CODE_CLASS (tcode) == '2')
4063 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4065 /* Note that we need not handle conditional operations here since fold
4066 already handles those cases. So just do arithmetic here. */
4067 switch (tcode)
4069 case INTEGER_CST:
4070 /* For a constant, we can always simplify if we are a multiply
4071 or (for divide and modulus) if it is a multiple of our constant. */
4072 if (code == MULT_EXPR
4073 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4074 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4075 break;
4077 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4078 /* If op0 is an expression ... */
4079 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4080 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4081 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4082 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4083 /* ... and is unsigned, and its type is smaller than ctype,
4084 then we cannot pass through as widening. */
4085 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4086 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4087 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4088 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4089 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4090 /* ... or its type is larger than ctype,
4091 then we cannot pass through this truncation. */
4092 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4093 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))))
4094 break;
4096 /* Pass the constant down and see if we can make a simplification. If
4097 we can, replace this expression with the inner simplification for
4098 possible later conversion to our or some other type. */
4099 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4100 code == MULT_EXPR ? ctype : NULL_TREE)))
4101 return t1;
4102 break;
4104 case NEGATE_EXPR: case ABS_EXPR:
4105 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4106 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4107 break;
4109 case MIN_EXPR: case MAX_EXPR:
4110 /* If widening the type changes the signedness, then we can't perform
4111 this optimization as that changes the result. */
4112 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4113 break;
4115 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4116 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4117 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4119 if (tree_int_cst_sgn (c) < 0)
4120 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4122 return fold (build (tcode, ctype, convert (ctype, t1),
4123 convert (ctype, t2)));
4125 break;
4127 case WITH_RECORD_EXPR:
4128 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4129 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4130 TREE_OPERAND (t, 1));
4131 break;
4133 case SAVE_EXPR:
4134 /* If this has not been evaluated and the operand has no side effects,
4135 we can see if we can do something inside it and make a new one.
4136 Note that this test is overly conservative since we can do this
4137 if the only reason it had side effects is that it was another
4138 similar SAVE_EXPR, but that isn't worth bothering with. */
4139 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4140 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4141 wide_type)))
4143 t1 = save_expr (t1);
4144 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4145 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4146 if (is_pending_size (t))
4147 put_pending_size (t1);
4148 return t1;
4150 break;
4152 case LSHIFT_EXPR: case RSHIFT_EXPR:
4153 /* If the second operand is constant, this is a multiplication
4154 or floor division, by a power of two, so we can treat it that
4155 way unless the multiplier or divisor overflows. */
4156 if (TREE_CODE (op1) == INTEGER_CST
4157 /* const_binop may not detect overflow correctly,
4158 so check for it explicitly here. */
4159 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4160 && TREE_INT_CST_HIGH (op1) == 0
4161 && 0 != (t1 = convert (ctype,
4162 const_binop (LSHIFT_EXPR, size_one_node,
4163 op1, 0)))
4164 && ! TREE_OVERFLOW (t1))
4165 return extract_muldiv (build (tcode == LSHIFT_EXPR
4166 ? MULT_EXPR : FLOOR_DIV_EXPR,
4167 ctype, convert (ctype, op0), t1),
4168 c, code, wide_type);
4169 break;
4171 case PLUS_EXPR: case MINUS_EXPR:
4172 /* See if we can eliminate the operation on both sides. If we can, we
4173 can return a new PLUS or MINUS. If we can't, the only remaining
4174 cases where we can do anything are if the second operand is a
4175 constant. */
4176 t1 = extract_muldiv (op0, c, code, wide_type);
4177 t2 = extract_muldiv (op1, c, code, wide_type);
4178 if (t1 != 0 && t2 != 0
4179 && (code == MULT_EXPR
4180 /* If not multiplication, we can only do this if either operand
4181 is divisible by c. */
4182 || multiple_of_p (ctype, op0, c)
4183 || multiple_of_p (ctype, op1, c)))
4184 return fold (build (tcode, ctype, convert (ctype, t1),
4185 convert (ctype, t2)));
4187 /* If this was a subtraction, negate OP1 and set it to be an addition.
4188 This simplifies the logic below. */
4189 if (tcode == MINUS_EXPR)
4190 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4192 if (TREE_CODE (op1) != INTEGER_CST)
4193 break;
4195 /* If either OP1 or C are negative, this optimization is not safe for
4196 some of the division and remainder types while for others we need
4197 to change the code. */
4198 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4200 if (code == CEIL_DIV_EXPR)
4201 code = FLOOR_DIV_EXPR;
4202 else if (code == FLOOR_DIV_EXPR)
4203 code = CEIL_DIV_EXPR;
4204 else if (code != MULT_EXPR
4205 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4206 break;
4209 /* If it's a multiply or a division/modulus operation of a multiple
4210 of our constant, do the operation and verify it doesn't overflow. */
4211 if (code == MULT_EXPR
4212 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4214 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4215 if (op1 == 0 || TREE_OVERFLOW (op1))
4216 break;
4218 else
4219 break;
4221 /* If we have an unsigned type is not a sizetype, we cannot widen
4222 the operation since it will change the result if the original
4223 computation overflowed. */
4224 if (TREE_UNSIGNED (ctype)
4225 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4226 && ctype != type)
4227 break;
4229 /* If we were able to eliminate our operation from the first side,
4230 apply our operation to the second side and reform the PLUS. */
4231 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4232 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4234 /* The last case is if we are a multiply. In that case, we can
4235 apply the distributive law to commute the multiply and addition
4236 if the multiplication of the constants doesn't overflow. */
4237 if (code == MULT_EXPR)
4238 return fold (build (tcode, ctype, fold (build (code, ctype,
4239 convert (ctype, op0),
4240 convert (ctype, c))),
4241 op1));
4243 break;
4245 case MULT_EXPR:
4246 /* We have a special case here if we are doing something like
4247 (C * 8) % 4 since we know that's zero. */
4248 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4249 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4250 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4251 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4252 return omit_one_operand (type, integer_zero_node, op0);
4254 /* ... fall through ... */
4256 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4257 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4258 /* If we can extract our operation from the LHS, do so and return a
4259 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4260 do something only if the second operand is a constant. */
4261 if (same_p
4262 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4263 return fold (build (tcode, ctype, convert (ctype, t1),
4264 convert (ctype, op1)));
4265 else if (tcode == MULT_EXPR && code == MULT_EXPR
4266 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4267 return fold (build (tcode, ctype, convert (ctype, op0),
4268 convert (ctype, t1)));
4269 else if (TREE_CODE (op1) != INTEGER_CST)
4270 return 0;
4272 /* If these are the same operation types, we can associate them
4273 assuming no overflow. */
4274 if (tcode == code
4275 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4276 convert (ctype, c), 0))
4277 && ! TREE_OVERFLOW (t1))
4278 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4280 /* If these operations "cancel" each other, we have the main
4281 optimizations of this pass, which occur when either constant is a
4282 multiple of the other, in which case we replace this with either an
4283 operation or CODE or TCODE.
4285 If we have an unsigned type that is not a sizetype, we cannot do
4286 this since it will change the result if the original computation
4287 overflowed. */
4288 if ((! TREE_UNSIGNED (ctype)
4289 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4290 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4291 || (tcode == MULT_EXPR
4292 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4293 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4295 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4296 return fold (build (tcode, ctype, convert (ctype, op0),
4297 convert (ctype,
4298 const_binop (TRUNC_DIV_EXPR,
4299 op1, c, 0))));
4300 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4301 return fold (build (code, ctype, convert (ctype, op0),
4302 convert (ctype,
4303 const_binop (TRUNC_DIV_EXPR,
4304 c, op1, 0))));
4306 break;
4308 default:
4309 break;
4312 return 0;
4315 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4316 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4317 that we may sometimes modify the tree. */
4319 static tree
4320 strip_compound_expr (t, s)
4321 tree t;
4322 tree s;
4324 enum tree_code code = TREE_CODE (t);
4326 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4327 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4328 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4329 return TREE_OPERAND (t, 1);
4331 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4332 don't bother handling any other types. */
4333 else if (code == COND_EXPR)
4335 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4336 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4337 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4339 else if (TREE_CODE_CLASS (code) == '1')
4340 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4341 else if (TREE_CODE_CLASS (code) == '<'
4342 || TREE_CODE_CLASS (code) == '2')
4344 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4345 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4348 return t;
4351 /* Return a node which has the indicated constant VALUE (either 0 or
4352 1), and is of the indicated TYPE. */
4354 static tree
4355 constant_boolean_node (value, type)
4356 int value;
4357 tree type;
4359 if (type == integer_type_node)
4360 return value ? integer_one_node : integer_zero_node;
4361 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4362 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4363 integer_zero_node);
4364 else
4366 tree t = build_int_2 (value, 0);
4368 TREE_TYPE (t) = type;
4369 return t;
4373 /* Utility function for the following routine, to see how complex a nesting of
4374 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4375 we don't care (to avoid spending too much time on complex expressions.). */
4377 static int
4378 count_cond (expr, lim)
4379 tree expr;
4380 int lim;
4382 int ctrue, cfalse;
4384 if (TREE_CODE (expr) != COND_EXPR)
4385 return 0;
4386 else if (lim <= 0)
4387 return 0;
4389 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4390 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4391 return MIN (lim, 1 + ctrue + cfalse);
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is non-zero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  */

static tree
fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
     enum tree_code code;
     tree type;
     tree cond;
     tree arg;
     int cond_first_p;
{
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  /* In the end, we'll produce a COND_EXPR.  Both arms of the
     conditional expression will be binary operations.  The left-hand
     side of the expression to be executed if the condition is true
     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
     of the expression to be executed if the condition is true will be
     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
     but apply to the expression to be executed if the conditional is
     false.  */
  tree *true_lhs;
  tree *true_rhs;
  tree *false_lhs;
  tree *false_rhs;
  /* These are the codes to use for the left-hand side and right-hand
     side of the COND_EXPR.  Normally, they are the same as CODE.  */
  enum tree_code lhs_code = code;
  enum tree_code rhs_code = code;
  /* And these are the types of the expressions.  */
  tree lhs_type = type;
  tree rhs_type = type;

  /* Wire the four pointers so that ARG lands on the side of the
     operation where it originally was, and the condition's two values
     land on the other side.  */
  if (cond_first_p)
    {
      true_rhs = false_rhs = &arg;
      true_lhs = &true_value;
      false_lhs = &false_value;
    }
  else
    {
      true_lhs = false_lhs = &arg;
      true_rhs = &true_value;
      false_rhs = &false_value;
    }

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  Instead of building `a + throw 3' for example,
	 we simply build `a, throw 3'.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	{
	  lhs_code = COMPOUND_EXPR;
	  if (!cond_first_p)
	    lhs_type = void_type_node;
	}
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	{
	  rhs_code = COMPOUND_EXPR;
	  if (!cond_first_p)
	    rhs_type = void_type_node;
	}
    }
  else
    {
      /* COND is a comparison (the `x < y' form): treat it as a
	 COND_EXPR selecting between 1 and 0 in its own type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = convert (testtype, integer_one_node);
      false_value = convert (testtype, integer_zero_node);
    }

  /* If ARG is complex we want to make sure we only evaluate
     it once.  Though this is only required if it is volatile, it
     might be more efficient even if it is not.  However, if we
     succeed in folding one part to a constant, we do not need
     to make this SAVE_EXPR.  Since we do this optimization
     primarily to see if we do end up with constant and this
     SAVE_EXPR interferes with later optimizations, suppressing
     it when we can is important.

     If we are not in a function, we can't make a SAVE_EXPR, so don't
     try to do so.  Don't try to see if the result is a constant
     if an arm is a COND_EXPR since we get exponential behavior
     in that case.  */

  if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
      && (*lang_hooks.decls.global_bindings_p) () == 0
      && ((TREE_CODE (arg) != VAR_DECL
	   && TREE_CODE (arg) != PARM_DECL)
	  || TREE_SIDE_EFFECTS (arg)))
    {
      /* Trial-fold each arm; a COND_EXPR arm is skipped (stays 0)
	 to avoid the exponential blowup mentioned above.  */
      if (TREE_CODE (true_value) != COND_EXPR)
	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));

      if (TREE_CODE (false_value) != COND_EXPR)
	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

      /* Neither arm folded to a constant: wrap ARG in a SAVE_EXPR and
	 discard the trial folds so they are rebuilt with the saved
	 ARG below.  */
      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
	arg = save_expr (arg), lhs = rhs = 0;
    }

  /* Build whichever arms were not already built above.  Note that
     *true_lhs etc. may now refer to the SAVE_EXPR-wrapped ARG.  */
  if (lhs == 0)
    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
  if (rhs == 0)
    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));

  test = fold (build (COND_EXPR, type, test, lhs, rhs));

  /* If ARG was wrapped in a SAVE_EXPR, make sure it is evaluated
     first, and strip the COMPOUND_EXPR that fold may have inserted
     merely to evaluate it inside TEST.  */
  if (TREE_CODE (arg) == SAVE_EXPR)
    return build (COMPOUND_EXPR, type,
		  convert (void_type_node, arg),
		  strip_compound_expr (test, arg));
  else
    return convert (type, test);
}
4523 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4525 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4526 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4527 ADDEND is the same as X.
4529 X + 0 and X - 0 both give X when X is NaN, infinite, or non-zero
4530 and finite. The problematic cases are when X is zero, and its mode
4531 has signed zeros. In the case of rounding towards -infinity,
4532 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4533 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4535 static bool
4536 fold_real_zero_addition_p (type, addend, negate)
4537 tree type, addend;
4538 int negate;
4540 if (!real_zerop (addend))
4541 return false;
4543 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4544 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4545 return true;
4547 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4548 if (TREE_CODE (addend) == REAL_CST
4549 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4550 negate = !negate;
4552 /* The mode has signed zeros, and we have to honor their sign.
4553 In this situation, there is only one case we can return true for.
4554 X - 0 is the same as X unless rounding towards -infinity is
4555 supported. */
4556 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4560 /* Perform constant folding and related simplification of EXPR.
4561 The related simplifications include x*1 => x, x*0 => 0, etc.,
4562 and application of the associative law.
4563 NOP_EXPR conversions may be removed freely (as long as we
4564 are careful not to change the C type of the overall expression)
4565 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4566 but we can constant-fold them if they have constant operands. */
4568 tree
4569 fold (expr)
4570 tree expr;
4572 tree t = expr;
4573 tree t1 = NULL_TREE;
4574 tree tem;
4575 tree type = TREE_TYPE (expr);
4576 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4577 enum tree_code code = TREE_CODE (t);
4578 int kind = TREE_CODE_CLASS (code);
4579 int invert;
4580 /* WINS will be nonzero when the switch is done
4581 if all operands are constant. */
4582 int wins = 1;
4584 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4585 Likewise for a SAVE_EXPR that's already been evaluated. */
4586 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4587 return t;
4589 /* Return right away if a constant. */
4590 if (kind == 'c')
4591 return t;
4593 #ifdef MAX_INTEGER_COMPUTATION_MODE
4594 check_max_integer_computation_mode (expr);
4595 #endif
4597 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4599 tree subop;
4601 /* Special case for conversion ops that can have fixed point args. */
4602 arg0 = TREE_OPERAND (t, 0);
4604 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4605 if (arg0 != 0)
4606 STRIP_SIGN_NOPS (arg0);
4608 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4609 subop = TREE_REALPART (arg0);
4610 else
4611 subop = arg0;
4613 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4614 && TREE_CODE (subop) != REAL_CST
4616 /* Note that TREE_CONSTANT isn't enough:
4617 static var addresses are constant but we can't
4618 do arithmetic on them. */
4619 wins = 0;
4621 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4623 int len = first_rtl_op (code);
4624 int i;
4625 for (i = 0; i < len; i++)
4627 tree op = TREE_OPERAND (t, i);
4628 tree subop;
4630 if (op == 0)
4631 continue; /* Valid for CALL_EXPR, at least. */
4633 if (kind == '<' || code == RSHIFT_EXPR)
4635 /* Signedness matters here. Perhaps we can refine this
4636 later. */
4637 STRIP_SIGN_NOPS (op);
4639 else
4640 /* Strip any conversions that don't change the mode. */
4641 STRIP_NOPS (op);
4643 if (TREE_CODE (op) == COMPLEX_CST)
4644 subop = TREE_REALPART (op);
4645 else
4646 subop = op;
4648 if (TREE_CODE (subop) != INTEGER_CST
4649 && TREE_CODE (subop) != REAL_CST)
4650 /* Note that TREE_CONSTANT isn't enough:
4651 static var addresses are constant but we can't
4652 do arithmetic on them. */
4653 wins = 0;
4655 if (i == 0)
4656 arg0 = op;
4657 else if (i == 1)
4658 arg1 = op;
4662 /* If this is a commutative operation, and ARG0 is a constant, move it
4663 to ARG1 to reduce the number of tests below. */
4664 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4665 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4666 || code == BIT_AND_EXPR)
4667 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4669 tem = arg0; arg0 = arg1; arg1 = tem;
4671 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4672 TREE_OPERAND (t, 1) = tem;
4675 /* Now WINS is set as described above,
4676 ARG0 is the first operand of EXPR,
4677 and ARG1 is the second operand (if it has more than one operand).
4679 First check for cases where an arithmetic operation is applied to a
4680 compound, conditional, or comparison operation. Push the arithmetic
4681 operation inside the compound or conditional to see if any folding
4682 can then be done. Convert comparison to conditional for this purpose.
4683 The also optimizes non-constant cases that used to be done in
4684 expand_expr.
4686 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4687 one of the operands is a comparison and the other is a comparison, a
4688 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4689 code below would make the expression more complex. Change it to a
4690 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4691 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4693 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4694 || code == EQ_EXPR || code == NE_EXPR)
4695 && ((truth_value_p (TREE_CODE (arg0))
4696 && (truth_value_p (TREE_CODE (arg1))
4697 || (TREE_CODE (arg1) == BIT_AND_EXPR
4698 && integer_onep (TREE_OPERAND (arg1, 1)))))
4699 || (truth_value_p (TREE_CODE (arg1))
4700 && (truth_value_p (TREE_CODE (arg0))
4701 || (TREE_CODE (arg0) == BIT_AND_EXPR
4702 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4704 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4705 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4706 : TRUTH_XOR_EXPR,
4707 type, arg0, arg1));
4709 if (code == EQ_EXPR)
4710 t = invert_truthvalue (t);
4712 return t;
4715 if (TREE_CODE_CLASS (code) == '1')
4717 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4718 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4719 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4720 else if (TREE_CODE (arg0) == COND_EXPR)
4722 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4723 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4724 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4726 /* If this was a conversion, and all we did was to move into
4727 inside the COND_EXPR, bring it back out. But leave it if
4728 it is a conversion from integer to integer and the
4729 result precision is no wider than a word since such a
4730 conversion is cheap and may be optimized away by combine,
4731 while it couldn't if it were outside the COND_EXPR. Then return
4732 so we don't get into an infinite recursion loop taking the
4733 conversion out and then back in. */
4735 if ((code == NOP_EXPR || code == CONVERT_EXPR
4736 || code == NON_LVALUE_EXPR)
4737 && TREE_CODE (t) == COND_EXPR
4738 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4739 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4740 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4741 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4742 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4743 && (INTEGRAL_TYPE_P
4744 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4745 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4746 t = build1 (code, type,
4747 build (COND_EXPR,
4748 TREE_TYPE (TREE_OPERAND
4749 (TREE_OPERAND (t, 1), 0)),
4750 TREE_OPERAND (t, 0),
4751 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4752 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4753 return t;
4755 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4756 return fold (build (COND_EXPR, type, arg0,
4757 fold (build1 (code, type, integer_one_node)),
4758 fold (build1 (code, type, integer_zero_node))));
4760 else if (TREE_CODE_CLASS (code) == '2'
4761 || TREE_CODE_CLASS (code) == '<')
4763 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4764 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4765 fold (build (code, type,
4766 arg0, TREE_OPERAND (arg1, 1))));
4767 else if ((TREE_CODE (arg1) == COND_EXPR
4768 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4769 && TREE_CODE_CLASS (code) != '<'))
4770 && (TREE_CODE (arg0) != COND_EXPR
4771 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4772 && (! TREE_SIDE_EFFECTS (arg0)
4773 || ((*lang_hooks.decls.global_bindings_p) () == 0
4774 && ! contains_placeholder_p (arg0))))
4775 return
4776 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4777 /*cond_first_p=*/0);
4778 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4779 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4780 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4781 else if ((TREE_CODE (arg0) == COND_EXPR
4782 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4783 && TREE_CODE_CLASS (code) != '<'))
4784 && (TREE_CODE (arg1) != COND_EXPR
4785 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4786 && (! TREE_SIDE_EFFECTS (arg1)
4787 || ((*lang_hooks.decls.global_bindings_p) () == 0
4788 && ! contains_placeholder_p (arg1))))
4789 return
4790 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4791 /*cond_first_p=*/1);
4793 else if (TREE_CODE_CLASS (code) == '<'
4794 && TREE_CODE (arg0) == COMPOUND_EXPR)
4795 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4796 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4797 else if (TREE_CODE_CLASS (code) == '<'
4798 && TREE_CODE (arg1) == COMPOUND_EXPR)
4799 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4800 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4802 switch (code)
4804 case INTEGER_CST:
4805 case REAL_CST:
4806 case VECTOR_CST:
4807 case STRING_CST:
4808 case COMPLEX_CST:
4809 case CONSTRUCTOR:
4810 return t;
4812 case CONST_DECL:
4813 return fold (DECL_INITIAL (t));
4815 case NOP_EXPR:
4816 case FLOAT_EXPR:
4817 case CONVERT_EXPR:
4818 case FIX_TRUNC_EXPR:
4819 /* Other kinds of FIX are not handled properly by fold_convert. */
4821 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4822 return TREE_OPERAND (t, 0);
4824 /* Handle cases of two conversions in a row. */
4825 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4826 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4828 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4829 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4830 tree final_type = TREE_TYPE (t);
4831 int inside_int = INTEGRAL_TYPE_P (inside_type);
4832 int inside_ptr = POINTER_TYPE_P (inside_type);
4833 int inside_float = FLOAT_TYPE_P (inside_type);
4834 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4835 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4836 int inter_int = INTEGRAL_TYPE_P (inter_type);
4837 int inter_ptr = POINTER_TYPE_P (inter_type);
4838 int inter_float = FLOAT_TYPE_P (inter_type);
4839 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4840 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4841 int final_int = INTEGRAL_TYPE_P (final_type);
4842 int final_ptr = POINTER_TYPE_P (final_type);
4843 int final_float = FLOAT_TYPE_P (final_type);
4844 unsigned int final_prec = TYPE_PRECISION (final_type);
4845 int final_unsignedp = TREE_UNSIGNED (final_type);
4847 /* In addition to the cases of two conversions in a row
4848 handled below, if we are converting something to its own
4849 type via an object of identical or wider precision, neither
4850 conversion is needed. */
4851 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4852 && ((inter_int && final_int) || (inter_float && final_float))
4853 && inter_prec >= final_prec)
4854 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4856 /* Likewise, if the intermediate and final types are either both
4857 float or both integer, we don't need the middle conversion if
4858 it is wider than the final type and doesn't change the signedness
4859 (for integers). Avoid this if the final type is a pointer
4860 since then we sometimes need the inner conversion. Likewise if
4861 the outer has a precision not equal to the size of its mode. */
4862 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4863 || (inter_float && inside_float))
4864 && inter_prec >= inside_prec
4865 && (inter_float || inter_unsignedp == inside_unsignedp)
4866 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4867 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4868 && ! final_ptr)
4869 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4871 /* If we have a sign-extension of a zero-extended value, we can
4872 replace that by a single zero-extension. */
4873 if (inside_int && inter_int && final_int
4874 && inside_prec < inter_prec && inter_prec < final_prec
4875 && inside_unsignedp && !inter_unsignedp)
4876 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4878 /* Two conversions in a row are not needed unless:
4879 - some conversion is floating-point (overstrict for now), or
4880 - the intermediate type is narrower than both initial and
4881 final, or
4882 - the intermediate type and innermost type differ in signedness,
4883 and the outermost type is wider than the intermediate, or
4884 - the initial type is a pointer type and the precisions of the
4885 intermediate and final types differ, or
4886 - the final type is a pointer type and the precisions of the
4887 initial and intermediate types differ. */
4888 if (! inside_float && ! inter_float && ! final_float
4889 && (inter_prec > inside_prec || inter_prec > final_prec)
4890 && ! (inside_int && inter_int
4891 && inter_unsignedp != inside_unsignedp
4892 && inter_prec < final_prec)
4893 && ((inter_unsignedp && inter_prec > inside_prec)
4894 == (final_unsignedp && final_prec > inter_prec))
4895 && ! (inside_ptr && inter_prec != final_prec)
4896 && ! (final_ptr && inside_prec != inter_prec)
4897 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4898 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4899 && ! final_ptr)
4900 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4903 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4904 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4905 /* Detect assigning a bitfield. */
4906 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4907 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4909 /* Don't leave an assignment inside a conversion
4910 unless assigning a bitfield. */
4911 tree prev = TREE_OPERAND (t, 0);
4912 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4913 /* First do the assignment, then return converted constant. */
4914 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4915 TREE_USED (t) = 1;
4916 return t;
4919 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4920 constants (if x has signed type, the sign bit cannot be set
4921 in c). This folds extension into the BIT_AND_EXPR. */
4922 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4923 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4924 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4925 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4927 tree and = TREE_OPERAND (t, 0);
4928 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4929 int change = 0;
4931 if (TREE_UNSIGNED (TREE_TYPE (and))
4932 || (TYPE_PRECISION (TREE_TYPE (t))
4933 <= TYPE_PRECISION (TREE_TYPE (and))))
4934 change = 1;
4935 else if (TYPE_PRECISION (TREE_TYPE (and1))
4936 <= HOST_BITS_PER_WIDE_INT
4937 && host_integerp (and1, 1))
4939 unsigned HOST_WIDE_INT cst;
4941 cst = tree_low_cst (and1, 1);
4942 cst &= (HOST_WIDE_INT) -1
4943 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4944 change = (cst == 0);
4945 #ifdef LOAD_EXTEND_OP
4946 if (change
4947 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4948 == ZERO_EXTEND))
4950 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4951 and0 = convert (uns, and0);
4952 and1 = convert (uns, and1);
4954 #endif
4956 if (change)
4957 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4958 convert (TREE_TYPE (t), and0),
4959 convert (TREE_TYPE (t), and1)));
4962 if (!wins)
4964 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4965 return t;
4967 return fold_convert (t, arg0);
4969 case VIEW_CONVERT_EXPR:
4970 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4971 return build1 (VIEW_CONVERT_EXPR, type,
4972 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4973 return t;
4975 case COMPONENT_REF:
4976 if (TREE_CODE (arg0) == CONSTRUCTOR)
4978 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4979 if (m)
4980 t = TREE_VALUE (m);
4982 return t;
4984 case RANGE_EXPR:
4985 TREE_CONSTANT (t) = wins;
4986 return t;
4988 case NEGATE_EXPR:
4989 if (wins)
4991 if (TREE_CODE (arg0) == INTEGER_CST)
4993 unsigned HOST_WIDE_INT low;
4994 HOST_WIDE_INT high;
4995 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4996 TREE_INT_CST_HIGH (arg0),
4997 &low, &high);
4998 t = build_int_2 (low, high);
4999 TREE_TYPE (t) = type;
5000 TREE_OVERFLOW (t)
5001 = (TREE_OVERFLOW (arg0)
5002 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5003 TREE_CONSTANT_OVERFLOW (t)
5004 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5006 else if (TREE_CODE (arg0) == REAL_CST)
5007 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5009 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5010 return TREE_OPERAND (arg0, 0);
5012 /* Convert - (a - b) to (b - a) for non-floating-point. */
5013 else if (TREE_CODE (arg0) == MINUS_EXPR
5014 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5015 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5016 TREE_OPERAND (arg0, 0));
5018 return t;
5020 case ABS_EXPR:
5021 if (wins)
5023 if (TREE_CODE (arg0) == INTEGER_CST)
5025 /* If the value is unsigned, then the absolute value is
5026 the same as the ordinary value. */
5027 if (TREE_UNSIGNED (type))
5028 return arg0;
5029 /* Similarly, if the value is non-negative. */
5030 else if (INT_CST_LT (integer_minus_one_node, arg0))
5031 return arg0;
5032 /* If the value is negative, then the absolute value is
5033 its negation. */
5034 else
5036 unsigned HOST_WIDE_INT low;
5037 HOST_WIDE_INT high;
5038 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5039 TREE_INT_CST_HIGH (arg0),
5040 &low, &high);
5041 t = build_int_2 (low, high);
5042 TREE_TYPE (t) = type;
5043 TREE_OVERFLOW (t)
5044 = (TREE_OVERFLOW (arg0)
5045 | force_fit_type (t, overflow));
5046 TREE_CONSTANT_OVERFLOW (t)
5047 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5050 else if (TREE_CODE (arg0) == REAL_CST)
5052 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5053 t = build_real (type,
5054 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5057 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5058 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5059 return t;
5061 case CONJ_EXPR:
5062 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5063 return convert (type, arg0);
5064 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5065 return build (COMPLEX_EXPR, type,
5066 TREE_OPERAND (arg0, 0),
5067 negate_expr (TREE_OPERAND (arg0, 1)));
5068 else if (TREE_CODE (arg0) == COMPLEX_CST)
5069 return build_complex (type, TREE_REALPART (arg0),
5070 negate_expr (TREE_IMAGPART (arg0)));
5071 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5072 return fold (build (TREE_CODE (arg0), type,
5073 fold (build1 (CONJ_EXPR, type,
5074 TREE_OPERAND (arg0, 0))),
5075 fold (build1 (CONJ_EXPR,
5076 type, TREE_OPERAND (arg0, 1)))));
5077 else if (TREE_CODE (arg0) == CONJ_EXPR)
5078 return TREE_OPERAND (arg0, 0);
5079 return t;
5081 case BIT_NOT_EXPR:
5082 if (wins)
5084 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5085 ~ TREE_INT_CST_HIGH (arg0));
5086 TREE_TYPE (t) = type;
5087 force_fit_type (t, 0);
5088 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5089 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5091 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5092 return TREE_OPERAND (arg0, 0);
5093 return t;
5095 case PLUS_EXPR:
5096 /* A + (-B) -> A - B */
5097 if (TREE_CODE (arg1) == NEGATE_EXPR)
5098 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5099 /* (-A) + B -> B - A */
5100 if (TREE_CODE (arg0) == NEGATE_EXPR)
5101 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5102 else if (! FLOAT_TYPE_P (type))
5104 if (integer_zerop (arg1))
5105 return non_lvalue (convert (type, arg0));
5107 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5108 with a constant, and the two constants have no bits in common,
5109 we should treat this as a BIT_IOR_EXPR since this may produce more
5110 simplifications. */
5111 if (TREE_CODE (arg0) == BIT_AND_EXPR
5112 && TREE_CODE (arg1) == BIT_AND_EXPR
5113 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5114 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5115 && integer_zerop (const_binop (BIT_AND_EXPR,
5116 TREE_OPERAND (arg0, 1),
5117 TREE_OPERAND (arg1, 1), 0)))
5119 code = BIT_IOR_EXPR;
5120 goto bit_ior;
5123 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5124 (plus (plus (mult) (mult)) (foo)) so that we can
5125 take advantage of the factoring cases below. */
5126 if ((TREE_CODE (arg0) == PLUS_EXPR
5127 && TREE_CODE (arg1) == MULT_EXPR)
5128 || (TREE_CODE (arg1) == PLUS_EXPR
5129 && TREE_CODE (arg0) == MULT_EXPR))
5131 tree parg0, parg1, parg, marg;
5133 if (TREE_CODE (arg0) == PLUS_EXPR)
5134 parg = arg0, marg = arg1;
5135 else
5136 parg = arg1, marg = arg0;
5137 parg0 = TREE_OPERAND (parg, 0);
5138 parg1 = TREE_OPERAND (parg, 1);
5139 STRIP_NOPS (parg0);
5140 STRIP_NOPS (parg1);
5142 if (TREE_CODE (parg0) == MULT_EXPR
5143 && TREE_CODE (parg1) != MULT_EXPR)
5144 return fold (build (PLUS_EXPR, type,
5145 fold (build (PLUS_EXPR, type, parg0, marg)),
5146 parg1));
5147 if (TREE_CODE (parg0) != MULT_EXPR
5148 && TREE_CODE (parg1) == MULT_EXPR)
5149 return fold (build (PLUS_EXPR, type,
5150 fold (build (PLUS_EXPR, type, parg1, marg)),
5151 parg0));
5154 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5156 tree arg00, arg01, arg10, arg11;
5157 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5159 /* (A * C) + (B * C) -> (A+B) * C.
5160 We are most concerned about the case where C is a constant,
5161 but other combinations show up during loop reduction. Since
5162 it is not difficult, try all four possibilities. */
5164 arg00 = TREE_OPERAND (arg0, 0);
5165 arg01 = TREE_OPERAND (arg0, 1);
5166 arg10 = TREE_OPERAND (arg1, 0);
5167 arg11 = TREE_OPERAND (arg1, 1);
5168 same = NULL_TREE;
5170 if (operand_equal_p (arg01, arg11, 0))
5171 same = arg01, alt0 = arg00, alt1 = arg10;
5172 else if (operand_equal_p (arg00, arg10, 0))
5173 same = arg00, alt0 = arg01, alt1 = arg11;
5174 else if (operand_equal_p (arg00, arg11, 0))
5175 same = arg00, alt0 = arg01, alt1 = arg10;
5176 else if (operand_equal_p (arg01, arg10, 0))
5177 same = arg01, alt0 = arg00, alt1 = arg11;
5179 /* No identical multiplicands; see if we can find a common
5180 power-of-two factor in non-power-of-two multiplies. This
5181 can help in multi-dimensional array access. */
5182 else if (TREE_CODE (arg01) == INTEGER_CST
5183 && TREE_CODE (arg11) == INTEGER_CST
5184 && TREE_INT_CST_HIGH (arg01) == 0
5185 && TREE_INT_CST_HIGH (arg11) == 0)
5187 HOST_WIDE_INT int01, int11, tmp;
5188 int01 = TREE_INT_CST_LOW (arg01);
5189 int11 = TREE_INT_CST_LOW (arg11);
5191 /* Move min of absolute values to int11. */
5192 if ((int01 >= 0 ? int01 : -int01)
5193 < (int11 >= 0 ? int11 : -int11))
5195 tmp = int01, int01 = int11, int11 = tmp;
5196 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5197 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5200 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5202 alt0 = fold (build (MULT_EXPR, type, arg00,
5203 build_int_2 (int01 / int11, 0)));
5204 alt1 = arg10;
5205 same = arg11;
5209 if (same)
5210 return fold (build (MULT_EXPR, type,
5211 fold (build (PLUS_EXPR, type, alt0, alt1)),
5212 same));
5216 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5217 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5218 return non_lvalue (convert (type, arg0));
5220 /* Likewise if the operands are reversed. */
5221 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5222 return non_lvalue (convert (type, arg1));
5224 bit_rotate:
5225 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5226 is a rotate of A by C1 bits. */
5227 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5228 is a rotate of A by B bits. */
5230 enum tree_code code0, code1;
5231 code0 = TREE_CODE (arg0);
5232 code1 = TREE_CODE (arg1);
5233 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5234 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5235 && operand_equal_p (TREE_OPERAND (arg0, 0),
5236 TREE_OPERAND (arg1, 0), 0)
5237 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5239 tree tree01, tree11;
5240 enum tree_code code01, code11;
5242 tree01 = TREE_OPERAND (arg0, 1);
5243 tree11 = TREE_OPERAND (arg1, 1);
5244 STRIP_NOPS (tree01);
5245 STRIP_NOPS (tree11);
5246 code01 = TREE_CODE (tree01);
5247 code11 = TREE_CODE (tree11);
5248 if (code01 == INTEGER_CST
5249 && code11 == INTEGER_CST
5250 && TREE_INT_CST_HIGH (tree01) == 0
5251 && TREE_INT_CST_HIGH (tree11) == 0
5252 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5253 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5254 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5255 code0 == LSHIFT_EXPR ? tree01 : tree11);
5256 else if (code11 == MINUS_EXPR)
5258 tree tree110, tree111;
5259 tree110 = TREE_OPERAND (tree11, 0);
5260 tree111 = TREE_OPERAND (tree11, 1);
5261 STRIP_NOPS (tree110);
5262 STRIP_NOPS (tree111);
5263 if (TREE_CODE (tree110) == INTEGER_CST
5264 && 0 == compare_tree_int (tree110,
5265 TYPE_PRECISION
5266 (TREE_TYPE (TREE_OPERAND
5267 (arg0, 0))))
5268 && operand_equal_p (tree01, tree111, 0))
5269 return build ((code0 == LSHIFT_EXPR
5270 ? LROTATE_EXPR
5271 : RROTATE_EXPR),
5272 type, TREE_OPERAND (arg0, 0), tree01);
5274 else if (code01 == MINUS_EXPR)
5276 tree tree010, tree011;
5277 tree010 = TREE_OPERAND (tree01, 0);
5278 tree011 = TREE_OPERAND (tree01, 1);
5279 STRIP_NOPS (tree010);
5280 STRIP_NOPS (tree011);
5281 if (TREE_CODE (tree010) == INTEGER_CST
5282 && 0 == compare_tree_int (tree010,
5283 TYPE_PRECISION
5284 (TREE_TYPE (TREE_OPERAND
5285 (arg0, 0))))
5286 && operand_equal_p (tree11, tree011, 0))
5287 return build ((code0 != LSHIFT_EXPR
5288 ? LROTATE_EXPR
5289 : RROTATE_EXPR),
5290 type, TREE_OPERAND (arg0, 0), tree11);
5295 associate:
5296 /* In most languages, can't associate operations on floats through
5297 parentheses. Rather than remember where the parentheses were, we
5298 don't associate floats at all. It shouldn't matter much. However,
5299 associating multiplications is only very slightly inaccurate, so do
5300 that if -funsafe-math-optimizations is specified. */
5302 if (! wins
5303 && (! FLOAT_TYPE_P (type)
5304 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5306 tree var0, con0, lit0, minus_lit0;
5307 tree var1, con1, lit1, minus_lit1;
5309 /* Split both trees into variables, constants, and literals. Then
5310 associate each group together, the constants with literals,
5311 then the result with variables. This increases the chances of
5312 literals being recombined later and of generating relocatable
5313 expressions for the sum of a constant and literal. */
5314 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5315 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5316 code == MINUS_EXPR);
5318 /* Only do something if we found more than two objects. Otherwise,
5319 nothing has changed and we risk infinite recursion. */
5320 if (2 < ((var0 != 0) + (var1 != 0)
5321 + (con0 != 0) + (con1 != 0)
5322 + (lit0 != 0) + (lit1 != 0)
5323 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5325 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5326 if (code == MINUS_EXPR)
5327 code = PLUS_EXPR;
5329 var0 = associate_trees (var0, var1, code, type);
5330 con0 = associate_trees (con0, con1, code, type);
5331 lit0 = associate_trees (lit0, lit1, code, type);
5332 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5334 /* Preserve the MINUS_EXPR if the negative part of the literal is
5335 greater than the positive part. Otherwise, the multiplicative
5336 folding code (i.e extract_muldiv) may be fooled in case
5337 unsigned constants are substracted, like in the following
5338 example: ((X*2 + 4) - 8U)/2. */
5339 if (minus_lit0 && lit0)
5341 if (tree_int_cst_lt (lit0, minus_lit0))
5343 minus_lit0 = associate_trees (minus_lit0, lit0,
5344 MINUS_EXPR, type);
5345 lit0 = 0;
5347 else
5349 lit0 = associate_trees (lit0, minus_lit0,
5350 MINUS_EXPR, type);
5351 minus_lit0 = 0;
5354 if (minus_lit0)
5356 if (con0 == 0)
5357 return convert (type, associate_trees (var0, minus_lit0,
5358 MINUS_EXPR, type));
5359 else
5361 con0 = associate_trees (con0, minus_lit0,
5362 MINUS_EXPR, type);
5363 return convert (type, associate_trees (var0, con0,
5364 PLUS_EXPR, type));
5368 con0 = associate_trees (con0, lit0, code, type);
5369 return convert (type, associate_trees (var0, con0, code, type));
5373 binary:
5374 if (wins)
5375 t1 = const_binop (code, arg0, arg1, 0);
5376 if (t1 != NULL_TREE)
5378 /* The return value should always have
5379 the same type as the original expression. */
5380 if (TREE_TYPE (t1) != TREE_TYPE (t))
5381 t1 = convert (TREE_TYPE (t), t1);
5383 return t1;
5385 return t;
5387 case MINUS_EXPR:
5388 /* A - (-B) -> A + B */
5389 if (TREE_CODE (arg1) == NEGATE_EXPR)
5390 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5391 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5392 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5393 return
5394 fold (build (MINUS_EXPR, type,
5395 build_real (TREE_TYPE (arg1),
5396 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5397 TREE_OPERAND (arg0, 0)));
5399 if (! FLOAT_TYPE_P (type))
5401 if (! wins && integer_zerop (arg0))
5402 return negate_expr (convert (type, arg1));
5403 if (integer_zerop (arg1))
5404 return non_lvalue (convert (type, arg0));
5406 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5407 about the case where C is a constant, just try one of the
5408 four possibilities. */
5410 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5411 && operand_equal_p (TREE_OPERAND (arg0, 1),
5412 TREE_OPERAND (arg1, 1), 0))
5413 return fold (build (MULT_EXPR, type,
5414 fold (build (MINUS_EXPR, type,
5415 TREE_OPERAND (arg0, 0),
5416 TREE_OPERAND (arg1, 0))),
5417 TREE_OPERAND (arg0, 1)));
5420 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5421 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5422 return non_lvalue (convert (type, arg0));
5424 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5425 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5426 (-ARG1 + ARG0) reduces to -ARG1. */
5427 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5428 return negate_expr (convert (type, arg1));
5430 /* Fold &x - &x. This can happen from &x.foo - &x.
5431 This is unsafe for certain floats even in non-IEEE formats.
5432 In IEEE, it is unsafe because it does wrong for NaNs.
5433 Also note that operand_equal_p is always false if an operand
5434 is volatile. */
5436 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5437 && operand_equal_p (arg0, arg1, 0))
5438 return convert (type, integer_zero_node);
5440 goto associate;
5442 case MULT_EXPR:
5443 /* (-A) * (-B) -> A * B */
5444 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5445 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5446 TREE_OPERAND (arg1, 0)));
5448 if (! FLOAT_TYPE_P (type))
5450 if (integer_zerop (arg1))
5451 return omit_one_operand (type, arg1, arg0);
5452 if (integer_onep (arg1))
5453 return non_lvalue (convert (type, arg0));
5455 /* (a * (1 << b)) is (a << b) */
5456 if (TREE_CODE (arg1) == LSHIFT_EXPR
5457 && integer_onep (TREE_OPERAND (arg1, 0)))
5458 return fold (build (LSHIFT_EXPR, type, arg0,
5459 TREE_OPERAND (arg1, 1)));
5460 if (TREE_CODE (arg0) == LSHIFT_EXPR
5461 && integer_onep (TREE_OPERAND (arg0, 0)))
5462 return fold (build (LSHIFT_EXPR, type, arg1,
5463 TREE_OPERAND (arg0, 1)));
5465 if (TREE_CODE (arg1) == INTEGER_CST
5466 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5467 code, NULL_TREE)))
5468 return convert (type, tem);
5471 else
5473 /* Maybe fold x * 0 to 0. The expressions aren't the same
5474 when x is NaN, since x * 0 is also NaN. Nor are they the
5475 same in modes with signed zeros, since multiplying a
5476 negative value by 0 gives -0, not +0. */
5477 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5478 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5479 && real_zerop (arg1))
5480 return omit_one_operand (type, arg1, arg0);
5481 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5482 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5483 && real_onep (arg1))
5484 return non_lvalue (convert (type, arg0));
5486 /* Transform x * -1.0 into -x. */
5487 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5488 && real_minus_onep (arg1))
5489 return fold (build1 (NEGATE_EXPR, type, arg0));
5491 /* x*2 is x+x */
5492 if (! wins && real_twop (arg1)
5493 && (*lang_hooks.decls.global_bindings_p) () == 0
5494 && ! contains_placeholder_p (arg0))
5496 tree arg = save_expr (arg0);
5497 return build (PLUS_EXPR, type, arg, arg);
5500 goto associate;
5502 case BIT_IOR_EXPR:
5503 bit_ior:
5504 if (integer_all_onesp (arg1))
5505 return omit_one_operand (type, arg1, arg0);
5506 if (integer_zerop (arg1))
5507 return non_lvalue (convert (type, arg0));
5508 t1 = distribute_bit_expr (code, type, arg0, arg1);
5509 if (t1 != NULL_TREE)
5510 return t1;
5512 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5514 This results in more efficient code for machines without a NAND
5515 instruction. Combine will canonicalize to the first form
5516 which will allow use of NAND instructions provided by the
5517 backend if they exist. */
5518 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5519 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5521 return fold (build1 (BIT_NOT_EXPR, type,
5522 build (BIT_AND_EXPR, type,
5523 TREE_OPERAND (arg0, 0),
5524 TREE_OPERAND (arg1, 0))));
5527 /* See if this can be simplified into a rotate first. If that
5528 is unsuccessful continue in the association code. */
5529 goto bit_rotate;
5531 case BIT_XOR_EXPR:
5532 if (integer_zerop (arg1))
5533 return non_lvalue (convert (type, arg0));
5534 if (integer_all_onesp (arg1))
5535 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5537 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5538 with a constant, and the two constants have no bits in common,
5539 we should treat this as a BIT_IOR_EXPR since this may produce more
5540 simplifications. */
5541 if (TREE_CODE (arg0) == BIT_AND_EXPR
5542 && TREE_CODE (arg1) == BIT_AND_EXPR
5543 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5544 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5545 && integer_zerop (const_binop (BIT_AND_EXPR,
5546 TREE_OPERAND (arg0, 1),
5547 TREE_OPERAND (arg1, 1), 0)))
5549 code = BIT_IOR_EXPR;
5550 goto bit_ior;
5553 /* See if this can be simplified into a rotate first. If that
5554 is unsuccessful continue in the association code. */
5555 goto bit_rotate;
5557 case BIT_AND_EXPR:
5558 bit_and:
5559 if (integer_all_onesp (arg1))
5560 return non_lvalue (convert (type, arg0));
5561 if (integer_zerop (arg1))
5562 return omit_one_operand (type, arg1, arg0);
5563 t1 = distribute_bit_expr (code, type, arg0, arg1);
5564 if (t1 != NULL_TREE)
5565 return t1;
5566 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5567 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5568 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5570 unsigned int prec
5571 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5573 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5574 && (~TREE_INT_CST_LOW (arg1)
5575 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5576 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5579 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5581 This results in more efficient code for machines without a NOR
5582 instruction. Combine will canonicalize to the first form
5583 which will allow use of NOR instructions provided by the
5584 backend if they exist. */
5585 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5586 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5588 return fold (build1 (BIT_NOT_EXPR, type,
5589 build (BIT_IOR_EXPR, type,
5590 TREE_OPERAND (arg0, 0),
5591 TREE_OPERAND (arg1, 0))));
5594 goto associate;
5596 case BIT_ANDTC_EXPR:
5597 if (integer_all_onesp (arg0))
5598 return non_lvalue (convert (type, arg1));
5599 if (integer_zerop (arg0))
5600 return omit_one_operand (type, arg0, arg1);
5601 if (TREE_CODE (arg1) == INTEGER_CST)
5603 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5604 code = BIT_AND_EXPR;
5605 goto bit_and;
5607 goto binary;
5609 case RDIV_EXPR:
5610 /* Don't touch a floating-point divide by zero unless the mode
5611 of the constant can represent infinity. */
5612 if (TREE_CODE (arg1) == REAL_CST
5613 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5614 && real_zerop (arg1))
5615 return t;
5617 /* (-A) / (-B) -> A / B */
5618 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5619 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5620 TREE_OPERAND (arg1, 0)));
5622 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
5623 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5624 && real_onep (arg1))
5625 return non_lvalue (convert (type, arg0));
5627 /* If ARG1 is a constant, we can convert this to a multiply by the
5628 reciprocal. This does not have the same rounding properties,
5629 so only do this if -funsafe-math-optimizations. We can actually
5630 always safely do it if ARG1 is a power of two, but it's hard to
5631 tell if it is or not in a portable manner. */
5632 if (TREE_CODE (arg1) == REAL_CST)
5634 if (flag_unsafe_math_optimizations
5635 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5636 arg1, 0)))
5637 return fold (build (MULT_EXPR, type, arg0, tem));
5638 /* Find the reciprocal if optimizing and the result is exact. */
5639 else if (optimize)
5641 REAL_VALUE_TYPE r;
5642 r = TREE_REAL_CST (arg1);
5643 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5645 tem = build_real (type, r);
5646 return fold (build (MULT_EXPR, type, arg0, tem));
5650 /* Convert A/B/C to A/(B*C). */
5651 if (flag_unsafe_math_optimizations
5652 && TREE_CODE (arg0) == RDIV_EXPR)
5654 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5655 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5656 arg1)));
5658 /* Convert A/(B/C) to (A/B)*C. */
5659 if (flag_unsafe_math_optimizations
5660 && TREE_CODE (arg1) == RDIV_EXPR)
5662 return fold (build (MULT_EXPR, type,
5663 build (RDIV_EXPR, type, arg0,
5664 TREE_OPERAND (arg1, 0)),
5665 TREE_OPERAND (arg1, 1)));
5667 goto binary;
5669 case TRUNC_DIV_EXPR:
5670 case ROUND_DIV_EXPR:
5671 case FLOOR_DIV_EXPR:
5672 case CEIL_DIV_EXPR:
5673 case EXACT_DIV_EXPR:
5674 if (integer_onep (arg1))
5675 return non_lvalue (convert (type, arg0));
5676 if (integer_zerop (arg1))
5677 return t;
5679 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5680 operation, EXACT_DIV_EXPR.
5682 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5683 At one time others generated faster code, it's not clear if they do
5684 after the last round to changes to the DIV code in expmed.c. */
5685 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5686 && multiple_of_p (type, arg0, arg1))
5687 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5689 if (TREE_CODE (arg1) == INTEGER_CST
5690 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5691 code, NULL_TREE)))
5692 return convert (type, tem);
5694 goto binary;
5696 case CEIL_MOD_EXPR:
5697 case FLOOR_MOD_EXPR:
5698 case ROUND_MOD_EXPR:
5699 case TRUNC_MOD_EXPR:
5700 if (integer_onep (arg1))
5701 return omit_one_operand (type, integer_zero_node, arg0);
5702 if (integer_zerop (arg1))
5703 return t;
5705 if (TREE_CODE (arg1) == INTEGER_CST
5706 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5707 code, NULL_TREE)))
5708 return convert (type, tem);
5710 goto binary;
5712 case LSHIFT_EXPR:
5713 case RSHIFT_EXPR:
5714 case LROTATE_EXPR:
5715 case RROTATE_EXPR:
5716 if (integer_zerop (arg1))
5717 return non_lvalue (convert (type, arg0));
5718 /* Since negative shift count is not well-defined,
5719 don't try to compute it in the compiler. */
5720 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5721 return t;
5722 /* Rewrite an LROTATE_EXPR by a constant into an
5723 RROTATE_EXPR by a new constant. */
5724 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5726 TREE_SET_CODE (t, RROTATE_EXPR);
5727 code = RROTATE_EXPR;
5728 TREE_OPERAND (t, 1) = arg1
5729 = const_binop
5730 (MINUS_EXPR,
5731 convert (TREE_TYPE (arg1),
5732 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5733 arg1, 0);
5734 if (tree_int_cst_sgn (arg1) < 0)
5735 return t;
5738 /* If we have a rotate of a bit operation with the rotate count and
5739 the second operand of the bit operation both constant,
5740 permute the two operations. */
5741 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5742 && (TREE_CODE (arg0) == BIT_AND_EXPR
5743 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5744 || TREE_CODE (arg0) == BIT_IOR_EXPR
5745 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5746 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5747 return fold (build (TREE_CODE (arg0), type,
5748 fold (build (code, type,
5749 TREE_OPERAND (arg0, 0), arg1)),
5750 fold (build (code, type,
5751 TREE_OPERAND (arg0, 1), arg1))));
5753 /* Two consecutive rotates adding up to the width of the mode can
5754 be ignored. */
5755 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5756 && TREE_CODE (arg0) == RROTATE_EXPR
5757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5758 && TREE_INT_CST_HIGH (arg1) == 0
5759 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5760 && ((TREE_INT_CST_LOW (arg1)
5761 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5762 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5763 return TREE_OPERAND (arg0, 0);
5765 goto binary;
5767 case MIN_EXPR:
5768 if (operand_equal_p (arg0, arg1, 0))
5769 return omit_one_operand (type, arg0, arg1);
5770 if (INTEGRAL_TYPE_P (type)
5771 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5772 return omit_one_operand (type, arg1, arg0);
5773 goto associate;
5775 case MAX_EXPR:
5776 if (operand_equal_p (arg0, arg1, 0))
5777 return omit_one_operand (type, arg0, arg1);
5778 if (INTEGRAL_TYPE_P (type)
5779 && TYPE_MAX_VALUE (type)
5780 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5781 return omit_one_operand (type, arg1, arg0);
5782 goto associate;
5784 case TRUTH_NOT_EXPR:
5785 /* Note that the operand of this must be an int
5786 and its values must be 0 or 1.
5787 ("true" is a fixed value perhaps depending on the language,
5788 but we don't handle values other than 1 correctly yet.) */
5789 tem = invert_truthvalue (arg0);
5790 /* Avoid infinite recursion. */
5791 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5792 return t;
5793 return convert (type, tem);
5795 case TRUTH_ANDIF_EXPR:
5796 /* Note that the operands of this must be ints
5797 and their values must be 0 or 1.
5798 ("true" is a fixed value perhaps depending on the language.) */
5799 /* If first arg is constant zero, return it. */
5800 if (integer_zerop (arg0))
5801 return convert (type, arg0);
5802 case TRUTH_AND_EXPR:
5803 /* If either arg is constant true, drop it. */
5804 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5805 return non_lvalue (convert (type, arg1));
5806 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5807 /* Preserve sequence points. */
5808 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5809 return non_lvalue (convert (type, arg0));
5810 /* If second arg is constant zero, result is zero, but first arg
5811 must be evaluated. */
5812 if (integer_zerop (arg1))
5813 return omit_one_operand (type, arg1, arg0);
5814 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5815 case will be handled here. */
5816 if (integer_zerop (arg0))
5817 return omit_one_operand (type, arg0, arg1);
5819 truth_andor:
5820 /* We only do these simplifications if we are optimizing. */
5821 if (!optimize)
5822 return t;
5824 /* Check for things like (A || B) && (A || C). We can convert this
5825 to A || (B && C). Note that either operator can be any of the four
5826 truth and/or operations and the transformation will still be
5827 valid. Also note that we only care about order for the
5828 ANDIF and ORIF operators. If B contains side effects, this
5829 might change the truth-value of A. */
5830 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5831 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5832 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5833 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5834 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5835 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5837 tree a00 = TREE_OPERAND (arg0, 0);
5838 tree a01 = TREE_OPERAND (arg0, 1);
5839 tree a10 = TREE_OPERAND (arg1, 0);
5840 tree a11 = TREE_OPERAND (arg1, 1);
5841 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5842 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5843 && (code == TRUTH_AND_EXPR
5844 || code == TRUTH_OR_EXPR));
5846 if (operand_equal_p (a00, a10, 0))
5847 return fold (build (TREE_CODE (arg0), type, a00,
5848 fold (build (code, type, a01, a11))));
5849 else if (commutative && operand_equal_p (a00, a11, 0))
5850 return fold (build (TREE_CODE (arg0), type, a00,
5851 fold (build (code, type, a01, a10))));
5852 else if (commutative && operand_equal_p (a01, a10, 0))
5853 return fold (build (TREE_CODE (arg0), type, a01,
5854 fold (build (code, type, a00, a11))));
5856 /* This case if tricky because we must either have commutative
5857 operators or else A10 must not have side-effects. */
5859 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5860 && operand_equal_p (a01, a11, 0))
5861 return fold (build (TREE_CODE (arg0), type,
5862 fold (build (code, type, a00, a10)),
5863 a01));
5866 /* See if we can build a range comparison. */
5867 if (0 != (tem = fold_range_test (t)))
5868 return tem;
5870 /* Check for the possibility of merging component references. If our
5871 lhs is another similar operation, try to merge its rhs with our
5872 rhs. Then try to merge our lhs and rhs. */
5873 if (TREE_CODE (arg0) == code
5874 && 0 != (tem = fold_truthop (code, type,
5875 TREE_OPERAND (arg0, 1), arg1)))
5876 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5878 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5879 return tem;
5881 return t;
5883 case TRUTH_ORIF_EXPR:
5884 /* Note that the operands of this must be ints
5885 and their values must be 0 or true.
5886 ("true" is a fixed value perhaps depending on the language.) */
5887 /* If first arg is constant true, return it. */
5888 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5889 return convert (type, arg0);
5890 case TRUTH_OR_EXPR:
5891 /* If either arg is constant zero, drop it. */
5892 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5893 return non_lvalue (convert (type, arg1));
5894 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5895 /* Preserve sequence points. */
5896 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5897 return non_lvalue (convert (type, arg0));
5898 /* If second arg is constant true, result is true, but we must
5899 evaluate first arg. */
5900 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5901 return omit_one_operand (type, arg1, arg0);
5902 /* Likewise for first arg, but note this only occurs here for
5903 TRUTH_OR_EXPR. */
5904 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5905 return omit_one_operand (type, arg0, arg1);
5906 goto truth_andor;
5908 case TRUTH_XOR_EXPR:
5909 /* If either arg is constant zero, drop it. */
5910 if (integer_zerop (arg0))
5911 return non_lvalue (convert (type, arg1));
5912 if (integer_zerop (arg1))
5913 return non_lvalue (convert (type, arg0));
5914 /* If either arg is constant true, this is a logical inversion. */
5915 if (integer_onep (arg0))
5916 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5917 if (integer_onep (arg1))
5918 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5919 return t;
5921 case EQ_EXPR:
5922 case NE_EXPR:
5923 case LT_EXPR:
5924 case GT_EXPR:
5925 case LE_EXPR:
5926 case GE_EXPR:
5927 /* If one arg is a real or integer constant, put it last. */
5928 if ((TREE_CODE (arg0) == INTEGER_CST
5929 && TREE_CODE (arg1) != INTEGER_CST)
5930 || (TREE_CODE (arg0) == REAL_CST
5931 && TREE_CODE (arg0) != REAL_CST))
5933 TREE_OPERAND (t, 0) = arg1;
5934 TREE_OPERAND (t, 1) = arg0;
5935 arg0 = TREE_OPERAND (t, 0);
5936 arg1 = TREE_OPERAND (t, 1);
5937 code = swap_tree_comparison (code);
5938 TREE_SET_CODE (t, code);
5941 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5943 /* (-a) CMP (-b) -> b CMP a */
5944 if (TREE_CODE (arg0) == NEGATE_EXPR
5945 && TREE_CODE (arg1) == NEGATE_EXPR)
5946 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5947 TREE_OPERAND (arg0, 0)));
5948 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5949 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5950 return
5951 fold (build
5952 (swap_tree_comparison (code), type,
5953 TREE_OPERAND (arg0, 0),
5954 build_real (TREE_TYPE (arg1),
5955 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5956 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5957 /* a CMP (-0) -> a CMP 0 */
5958 if (TREE_CODE (arg1) == REAL_CST
5959 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5960 return fold (build (code, type, arg0,
5961 build_real (TREE_TYPE (arg1), dconst0)));
5963 /* If this is a comparison of a real constant with a PLUS_EXPR
5964 or a MINUS_EXPR of a real constant, we can convert it into a
5965 comparison with a revised real constant as long as no overflow
5966 occurs when unsafe_math_optimizations are enabled. */
5967 if (flag_unsafe_math_optimizations
5968 && TREE_CODE (arg1) == REAL_CST
5969 && (TREE_CODE (arg0) == PLUS_EXPR
5970 || TREE_CODE (arg0) == MINUS_EXPR)
5971 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5972 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5973 ? MINUS_EXPR : PLUS_EXPR,
5974 arg1, TREE_OPERAND (arg0, 1), 0))
5975 && ! TREE_CONSTANT_OVERFLOW (tem))
5976 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5979 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5980 First, see if one arg is constant; find the constant arg
5981 and the other one. */
5983 tree constop = 0, varop = NULL_TREE;
5984 int constopnum = -1;
5986 if (TREE_CONSTANT (arg1))
5987 constopnum = 1, constop = arg1, varop = arg0;
5988 if (TREE_CONSTANT (arg0))
5989 constopnum = 0, constop = arg0, varop = arg1;
5991 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5993 /* This optimization is invalid for ordered comparisons
5994 if CONST+INCR overflows or if foo+incr might overflow.
5995 This optimization is invalid for floating point due to rounding.
5996 For pointer types we assume overflow doesn't happen. */
5997 if (POINTER_TYPE_P (TREE_TYPE (varop))
5998 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5999 && (code == EQ_EXPR || code == NE_EXPR)))
6001 tree newconst
6002 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6003 constop, TREE_OPERAND (varop, 1)));
6005 /* Do not overwrite the current varop to be a preincrement,
6006 create a new node so that we won't confuse our caller who
6007 might create trees and throw them away, reusing the
6008 arguments that they passed to build. This shows up in
6009 the THEN or ELSE parts of ?: being postincrements. */
6010 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6011 TREE_OPERAND (varop, 0),
6012 TREE_OPERAND (varop, 1));
6014 /* If VAROP is a reference to a bitfield, we must mask
6015 the constant by the width of the field. */
6016 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6017 && DECL_BIT_FIELD(TREE_OPERAND
6018 (TREE_OPERAND (varop, 0), 1)))
6020 int size
6021 = TREE_INT_CST_LOW (DECL_SIZE
6022 (TREE_OPERAND
6023 (TREE_OPERAND (varop, 0), 1)));
6024 tree mask, unsigned_type;
6025 unsigned int precision;
6026 tree folded_compare;
6028 /* First check whether the comparison would come out
6029 always the same. If we don't do that we would
6030 change the meaning with the masking. */
6031 if (constopnum == 0)
6032 folded_compare = fold (build (code, type, constop,
6033 TREE_OPERAND (varop, 0)));
6034 else
6035 folded_compare = fold (build (code, type,
6036 TREE_OPERAND (varop, 0),
6037 constop));
6038 if (integer_zerop (folded_compare)
6039 || integer_onep (folded_compare))
6040 return omit_one_operand (type, folded_compare, varop);
6042 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6043 precision = TYPE_PRECISION (unsigned_type);
6044 mask = build_int_2 (~0, ~0);
6045 TREE_TYPE (mask) = unsigned_type;
6046 force_fit_type (mask, 0);
6047 mask = const_binop (RSHIFT_EXPR, mask,
6048 size_int (precision - size), 0);
6049 newconst = fold (build (BIT_AND_EXPR,
6050 TREE_TYPE (varop), newconst,
6051 convert (TREE_TYPE (varop),
6052 mask)));
6055 t = build (code, type,
6056 (constopnum == 0) ? newconst : varop,
6057 (constopnum == 1) ? newconst : varop);
6058 return t;
6061 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6063 if (POINTER_TYPE_P (TREE_TYPE (varop))
6064 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6065 && (code == EQ_EXPR || code == NE_EXPR)))
6067 tree newconst
6068 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6069 constop, TREE_OPERAND (varop, 1)));
6071 /* Do not overwrite the current varop to be a predecrement,
6072 create a new node so that we won't confuse our caller who
6073 might create trees and throw them away, reusing the
6074 arguments that they passed to build. This shows up in
6075 the THEN or ELSE parts of ?: being postdecrements. */
6076 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6077 TREE_OPERAND (varop, 0),
6078 TREE_OPERAND (varop, 1));
6080 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6081 && DECL_BIT_FIELD(TREE_OPERAND
6082 (TREE_OPERAND (varop, 0), 1)))
6084 int size
6085 = TREE_INT_CST_LOW (DECL_SIZE
6086 (TREE_OPERAND
6087 (TREE_OPERAND (varop, 0), 1)));
6088 tree mask, unsigned_type;
6089 unsigned int precision;
6090 tree folded_compare;
6092 if (constopnum == 0)
6093 folded_compare = fold (build (code, type, constop,
6094 TREE_OPERAND (varop, 0)));
6095 else
6096 folded_compare = fold (build (code, type,
6097 TREE_OPERAND (varop, 0),
6098 constop));
6099 if (integer_zerop (folded_compare)
6100 || integer_onep (folded_compare))
6101 return omit_one_operand (type, folded_compare, varop);
6103 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6104 precision = TYPE_PRECISION (unsigned_type);
6105 mask = build_int_2 (~0, ~0);
6106 TREE_TYPE (mask) = unsigned_type;  /* must be unsigned so the RSHIFT below is logical, yielding the low SIZE bits; also matches the POSTINCREMENT_EXPR twin above */
6107 force_fit_type (mask, 0);
6108 mask = const_binop (RSHIFT_EXPR, mask,
6109 size_int (precision - size), 0);
6110 newconst = fold (build (BIT_AND_EXPR,
6111 TREE_TYPE (varop), newconst,
6112 convert (TREE_TYPE (varop),
6113 mask)));
6116 t = build (code, type,
6117 (constopnum == 0) ? newconst : varop,
6118 (constopnum == 1) ? newconst : varop);
6119 return t;
6124 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6125 This transformation affects the cases which are handled in later
6126 optimizations involving comparisons with non-negative constants. */
6127 if (TREE_CODE (arg1) == INTEGER_CST
6128 && TREE_CODE (arg0) != INTEGER_CST
6129 && tree_int_cst_sgn (arg1) > 0)
6131 switch (code)
6133 case GE_EXPR:
6134 code = GT_EXPR;
6135 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6136 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6137 break;
6139 case LT_EXPR:
6140 code = LE_EXPR;
6141 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6142 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6143 break;
6145 default:
6146 break;
6150 /* Comparisons with the highest or lowest possible integer of
6151 the specified size will have known values. */
6153 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6155 if (TREE_CODE (arg1) == INTEGER_CST
6156 && ! TREE_CONSTANT_OVERFLOW (arg1)
6157 && width <= HOST_BITS_PER_WIDE_INT
6158 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6159 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6161 unsigned HOST_WIDE_INT signed_max;
6162 unsigned HOST_WIDE_INT max, min;
6164 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6166 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6168 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6169 min = 0;
6171 else
6173 max = signed_max;
6174 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6177 if (TREE_INT_CST_HIGH (arg1) == 0
6178 && TREE_INT_CST_LOW (arg1) == max)
6179 switch (code)
6181 case GT_EXPR:
6182 return omit_one_operand (type,
6183 convert (type, integer_zero_node),
6184 arg0);
6185 case GE_EXPR:
6186 code = EQ_EXPR;
6187 TREE_SET_CODE (t, EQ_EXPR);
6188 break;
6189 case LE_EXPR:
6190 return omit_one_operand (type,
6191 convert (type, integer_one_node),
6192 arg0);
6193 case LT_EXPR:
6194 code = NE_EXPR;
6195 TREE_SET_CODE (t, NE_EXPR);
6196 break;
6198 /* The GE_EXPR and LT_EXPR cases above are not normally
6199 reached because of previous transformations. */
6201 default:
6202 break;
6204 else if (TREE_INT_CST_HIGH (arg1) == 0
6205 && TREE_INT_CST_LOW (arg1) == max - 1)
6206 switch (code)
6208 case GT_EXPR:
6209 code = EQ_EXPR;
6210 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6211 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6212 break;
6213 case LE_EXPR:
6214 code = NE_EXPR;
6215 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6216 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6217 break;
6218 default:
6219 break;
6221 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6222 && TREE_INT_CST_LOW (arg1) == min)
6223 switch (code)
6225 case LT_EXPR:
6226 return omit_one_operand (type,
6227 convert (type, integer_zero_node),
6228 arg0);
6229 case LE_EXPR:
6230 code = EQ_EXPR;
6231 TREE_SET_CODE (t, EQ_EXPR);
6232 break;
6234 case GE_EXPR:
6235 return omit_one_operand (type,
6236 convert (type, integer_one_node),
6237 arg0);
6238 case GT_EXPR:
6239 code = NE_EXPR;
6240 TREE_SET_CODE (t, NE_EXPR);
6241 break;
6243 default:
6244 break;
6246 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6247 && TREE_INT_CST_LOW (arg1) == min + 1)
6248 switch (code)
6250 case GE_EXPR:
6251 code = NE_EXPR;
6252 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6253 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6254 break;
6255 case LT_EXPR:
6256 code = EQ_EXPR;
6257 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6258 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6259 break;
6260 default:
6261 break;
6264 else if (TREE_INT_CST_HIGH (arg1) == 0
6265 && TREE_INT_CST_LOW (arg1) == signed_max
6266 && TREE_UNSIGNED (TREE_TYPE (arg1))
6267 /* signed_type does not work on pointer types. */
6268 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6270 /* The following case also applies to X < signed_max+1
6271 and X >= signed_max+1 because previous transformations. */
6272 if (code == LE_EXPR || code == GT_EXPR)
6274 tree st0, st1;
6275 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6276 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6277 return fold
6278 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6279 type, convert (st0, arg0),
6280 convert (st1, integer_zero_node)));
6286 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6287 a MINUS_EXPR of a constant, we can convert it into a comparison with
6288 a revised constant as long as no overflow occurs. */
6289 if ((code == EQ_EXPR || code == NE_EXPR)
6290 && TREE_CODE (arg1) == INTEGER_CST
6291 && (TREE_CODE (arg0) == PLUS_EXPR
6292 || TREE_CODE (arg0) == MINUS_EXPR)
6293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6294 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6295 ? MINUS_EXPR : PLUS_EXPR,
6296 arg1, TREE_OPERAND (arg0, 1), 0))
6297 && ! TREE_CONSTANT_OVERFLOW (tem))
6298 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6300 /* Similarly for a NEGATE_EXPR. */
6301 else if ((code == EQ_EXPR || code == NE_EXPR)
6302 && TREE_CODE (arg0) == NEGATE_EXPR
6303 && TREE_CODE (arg1) == INTEGER_CST
6304 && 0 != (tem = negate_expr (arg1))
6305 && TREE_CODE (tem) == INTEGER_CST
6306 && ! TREE_CONSTANT_OVERFLOW (tem))
6307 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6309 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6310 for !=. Don't do this for ordered comparisons due to overflow. */
6311 else if ((code == NE_EXPR || code == EQ_EXPR)
6312 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6313 return fold (build (code, type,
6314 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6316 /* If we are widening one operand of an integer comparison,
6317 see if the other operand is similarly being widened. Perhaps we
6318 can do the comparison in the narrower type. */
6319 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6320 && TREE_CODE (arg0) == NOP_EXPR
6321 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6322 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6323 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6324 || (TREE_CODE (t1) == INTEGER_CST
6325 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6326 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6328 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6329 constant, we can simplify it. */
6330 else if (TREE_CODE (arg1) == INTEGER_CST
6331 && (TREE_CODE (arg0) == MIN_EXPR
6332 || TREE_CODE (arg0) == MAX_EXPR)
6333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6334 return optimize_minmax_comparison (t);
6336 /* If we are comparing an ABS_EXPR with a constant, we can
6337 convert all the cases into explicit comparisons, but they may
6338 well not be faster than doing the ABS and one comparison.
6339 But ABS (X) <= C is a range comparison, which becomes a subtraction
6340 and a comparison, and is probably faster. */
6341 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6342 && TREE_CODE (arg0) == ABS_EXPR
6343 && ! TREE_SIDE_EFFECTS (arg0)
6344 && (0 != (tem = negate_expr (arg1)))
6345 && TREE_CODE (tem) == INTEGER_CST
6346 && ! TREE_CONSTANT_OVERFLOW (tem))
6347 return fold (build (TRUTH_ANDIF_EXPR, type,
6348 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6349 build (LE_EXPR, type,
6350 TREE_OPERAND (arg0, 0), arg1)));
6352 /* If this is an EQ or NE comparison with zero and ARG0 is
6353 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6354 two operations, but the latter can be done in one less insn
6355 on machines that have only two-operand insns or on which a
6356 constant cannot be the first operand. */
6357 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6358 && TREE_CODE (arg0) == BIT_AND_EXPR)
6360 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6361 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6362 return
6363 fold (build (code, type,
6364 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6365 build (RSHIFT_EXPR,
6366 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6367 TREE_OPERAND (arg0, 1),
6368 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6369 convert (TREE_TYPE (arg0),
6370 integer_one_node)),
6371 arg1));
6372 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6373 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6374 return
6375 fold (build (code, type,
6376 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6377 build (RSHIFT_EXPR,
6378 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6379 TREE_OPERAND (arg0, 0),
6380 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6381 convert (TREE_TYPE (arg0),
6382 integer_one_node)),
6383 arg1));
6386 /* If this is an NE or EQ comparison of zero against the result of a
6387 signed MOD operation whose second operand is a power of 2, make
6388 the MOD operation unsigned since it is simpler and equivalent. */
6389 if ((code == NE_EXPR || code == EQ_EXPR)
6390 && integer_zerop (arg1)
6391 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6392 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6393 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6394 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6395 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6396 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6398 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6399 tree newmod = build (TREE_CODE (arg0), newtype,
6400 convert (newtype, TREE_OPERAND (arg0, 0)),
6401 convert (newtype, TREE_OPERAND (arg0, 1)));
6403 return build (code, type, newmod, convert (newtype, arg1));
6406 /* If this is an NE comparison of zero with an AND of one, remove the
6407 comparison since the AND will give the correct value. */
6408 if (code == NE_EXPR && integer_zerop (arg1)
6409 && TREE_CODE (arg0) == BIT_AND_EXPR
6410 && integer_onep (TREE_OPERAND (arg0, 1)))
6411 return convert (type, arg0);
6413 /* If we have (A & C) == C where C is a power of 2, convert this into
6414 (A & C) != 0. Similarly for NE_EXPR. */
6415 if ((code == EQ_EXPR || code == NE_EXPR)
6416 && TREE_CODE (arg0) == BIT_AND_EXPR
6417 && integer_pow2p (TREE_OPERAND (arg0, 1))
6418 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6419 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6420 arg0, integer_zero_node));
6422 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6423 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6424 if ((code == EQ_EXPR || code == NE_EXPR)
6425 && TREE_CODE (arg0) == BIT_AND_EXPR
6426 && integer_zerop (arg1))
6428 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6429 TREE_OPERAND (arg0, 1));
6430 if (arg00 != NULL_TREE)
6432 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6433 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6434 convert (stype, arg00),
6435 convert (stype, integer_zero_node)));
6439 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6440 and similarly for >= into !=. */
6441 if ((code == LT_EXPR || code == GE_EXPR)
6442 && TREE_UNSIGNED (TREE_TYPE (arg0))
6443 && TREE_CODE (arg1) == LSHIFT_EXPR
6444 && integer_onep (TREE_OPERAND (arg1, 0)))
6445 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6446 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6447 TREE_OPERAND (arg1, 1)),
6448 convert (TREE_TYPE (arg0), integer_zero_node));
6450 else if ((code == LT_EXPR || code == GE_EXPR)
6451 && TREE_UNSIGNED (TREE_TYPE (arg0))
6452 && (TREE_CODE (arg1) == NOP_EXPR
6453 || TREE_CODE (arg1) == CONVERT_EXPR)
6454 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6455 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6456 return
6457 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6458 convert (TREE_TYPE (arg0),
6459 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6460 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6461 convert (TREE_TYPE (arg0), integer_zero_node));
6463 /* Simplify comparison of something with itself. (For IEEE
6464 floating-point, we can only do some of these simplifications.) */
6465 if (operand_equal_p (arg0, arg1, 0))
6467 switch (code)
6469 case EQ_EXPR:
6470 case GE_EXPR:
6471 case LE_EXPR:
6472 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6473 return constant_boolean_node (1, type);
6474 code = EQ_EXPR;
6475 TREE_SET_CODE (t, code);
6476 break;
6478 case NE_EXPR:
6479 /* For NE, we can only do this simplification if integer. */
6480 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6481 break;
6482 /* ... fall through ... */
6483 case GT_EXPR:
6484 case LT_EXPR:
6485 return constant_boolean_node (0, type);
6486 default:
6487 abort ();
6491 /* If we are comparing an expression that just has comparisons
6492 of two integer values, arithmetic expressions of those comparisons,
6493 and constants, we can simplify it. There are only three cases
6494 to check: the two values can either be equal, the first can be
6495 greater, or the second can be greater. Fold the expression for
6496 those three values. Since each value must be 0 or 1, we have
6497 eight possibilities, each of which corresponds to the constant 0
6498 or 1 or one of the six possible comparisons.
6500 This handles common cases like (a > b) == 0 but also handles
6501 expressions like ((x > y) - (y > x)) > 0, which supposedly
6502 occur in macroized code. */
6504 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6506 tree cval1 = 0, cval2 = 0;
6507 int save_p = 0;
6509 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6510 /* Don't handle degenerate cases here; they should already
6511 have been handled anyway. */
6512 && cval1 != 0 && cval2 != 0
6513 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6514 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6515 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6516 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6517 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6518 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6519 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6521 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6522 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6524 /* We can't just pass T to eval_subst in case cval1 or cval2
6525 was the same as ARG1. */
6527 tree high_result
6528 = fold (build (code, type,
6529 eval_subst (arg0, cval1, maxval, cval2, minval),
6530 arg1));
6531 tree equal_result
6532 = fold (build (code, type,
6533 eval_subst (arg0, cval1, maxval, cval2, maxval),
6534 arg1));
6535 tree low_result
6536 = fold (build (code, type,
6537 eval_subst (arg0, cval1, minval, cval2, maxval),
6538 arg1));
6540 /* All three of these results should be 0 or 1. Confirm they
6541 are. Then use those values to select the proper code
6542 to use. */
6544 if ((integer_zerop (high_result)
6545 || integer_onep (high_result))
6546 && (integer_zerop (equal_result)
6547 || integer_onep (equal_result))
6548 && (integer_zerop (low_result)
6549 || integer_onep (low_result)))
6551 /* Make a 3-bit mask with the high-order bit being the
6552 value for `>', the next for '=', and the low for '<'. */
6553 switch ((integer_onep (high_result) * 4)
6554 + (integer_onep (equal_result) * 2)
6555 + integer_onep (low_result))
6557 case 0:
6558 /* Always false. */
6559 return omit_one_operand (type, integer_zero_node, arg0);
6560 case 1:
6561 code = LT_EXPR;
6562 break;
6563 case 2:
6564 code = EQ_EXPR;
6565 break;
6566 case 3:
6567 code = LE_EXPR;
6568 break;
6569 case 4:
6570 code = GT_EXPR;
6571 break;
6572 case 5:
6573 code = NE_EXPR;
6574 break;
6575 case 6:
6576 code = GE_EXPR;
6577 break;
6578 case 7:
6579 /* Always true. */
6580 return omit_one_operand (type, integer_one_node, arg0);
6583 t = build (code, type, cval1, cval2);
6584 if (save_p)
6585 return save_expr (t);
6586 else
6587 return fold (t);
6592 /* If this is a comparison of a field, we may be able to simplify it. */
6593 if ((TREE_CODE (arg0) == COMPONENT_REF
6594 || TREE_CODE (arg0) == BIT_FIELD_REF)
6595 && (code == EQ_EXPR || code == NE_EXPR)
6596 /* Handle the constant case even without -O
6597 to make sure the warnings are given. */
6598 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6600 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6601 return t1 ? t1 : t;
6604 /* If this is a comparison of complex values and either or both sides
6605 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6606 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6607 This may prevent needless evaluations. */
6608 if ((code == EQ_EXPR || code == NE_EXPR)
6609 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6610 && (TREE_CODE (arg0) == COMPLEX_EXPR
6611 || TREE_CODE (arg1) == COMPLEX_EXPR
6612 || TREE_CODE (arg0) == COMPLEX_CST
6613 || TREE_CODE (arg1) == COMPLEX_CST))
6615 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6616 tree real0, imag0, real1, imag1;
6618 arg0 = save_expr (arg0);
6619 arg1 = save_expr (arg1);
6620 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6621 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6622 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6623 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6625 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6626 : TRUTH_ORIF_EXPR),
6627 type,
6628 fold (build (code, type, real0, real1)),
6629 fold (build (code, type, imag0, imag1))));
6632 /* Optimize comparisons of strlen vs zero to a compare of the
6633 first character of the string vs zero. To wit,
6634 strlen(ptr) == 0 => *ptr == 0
6635 strlen(ptr) != 0 => *ptr != 0
6636 Other cases should reduce to one of these two (or a constant)
6637 due to the return value of strlen being unsigned. */
6638 if ((code == EQ_EXPR || code == NE_EXPR)
6639 && integer_zerop (arg1)
6640 && TREE_CODE (arg0) == CALL_EXPR
6641 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6643 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6644 tree arglist;
6646 if (TREE_CODE (fndecl) == FUNCTION_DECL
6647 && DECL_BUILT_IN (fndecl)
6648 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6649 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6650 && (arglist = TREE_OPERAND (arg0, 1))
6651 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6652 && ! TREE_CHAIN (arglist))
6653 return fold (build (code, type,
6654 build1 (INDIRECT_REF, char_type_node,
6655 TREE_VALUE(arglist)),
6656 integer_zero_node));
6659 /* From here on, the only cases we handle are when the result is
6660 known to be a constant.
6662 To compute GT, swap the arguments and do LT.
6663 To compute GE, do LT and invert the result.
6664 To compute LE, swap the arguments, do LT and invert the result.
6665 To compute NE, do EQ and invert the result.
6667 Therefore, the code below must handle only EQ and LT. */
6669 if (code == LE_EXPR || code == GT_EXPR)
6671 tem = arg0, arg0 = arg1, arg1 = tem;
6672 code = swap_tree_comparison (code);
6675 /* Note that it is safe to invert for real values here because we
6676 will check below in the one case that it matters. */
6678 t1 = NULL_TREE;
6679 invert = 0;
6680 if (code == NE_EXPR || code == GE_EXPR)
6682 invert = 1;
6683 code = invert_tree_comparison (code);
6686 /* Compute a result for LT or EQ if args permit;
6687 otherwise return T. */
6688 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6690 if (code == EQ_EXPR)
6691 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6692 else
6693 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6694 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6695 : INT_CST_LT (arg0, arg1)),
6699 #if 0 /* This is no longer useful, but breaks some real code. */
6700 /* Assume a nonexplicit constant cannot equal an explicit one,
6701 since such code would be undefined anyway.
6702 Exception: on sysvr4, using #pragma weak,
6703 a label can come out as 0. */
6704 else if (TREE_CODE (arg1) == INTEGER_CST
6705 && !integer_zerop (arg1)
6706 && TREE_CONSTANT (arg0)
6707 && TREE_CODE (arg0) == ADDR_EXPR
6708 && code == EQ_EXPR)
6709 t1 = build_int_2 (0, 0);
6710 #endif
6711 /* Two real constants can be compared explicitly. */
6712 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6714 /* If either operand is a NaN, the result is false with two
6715 exceptions: First, an NE_EXPR is true on NaNs, but that case
6716 is already handled correctly since we will be inverting the
6717 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6718 or a GE_EXPR into a LT_EXPR, we must return true so that it
6719 will be inverted into false. */
6721 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6722 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6723 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6725 else if (code == EQ_EXPR)
6726 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6727 TREE_REAL_CST (arg1)),
6729 else
6730 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6731 TREE_REAL_CST (arg1)),
6735 if (t1 == NULL_TREE)
6736 return t;
6738 if (invert)
6739 TREE_INT_CST_LOW (t1) ^= 1;
6741 TREE_TYPE (t1) = type;
6742 if (TREE_CODE (type) == BOOLEAN_TYPE)
6743 return (*lang_hooks.truthvalue_conversion) (t1);
6744 return t1;
6746 case COND_EXPR:
6747 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6748 so all simple results must be passed through pedantic_non_lvalue. */
6749 if (TREE_CODE (arg0) == INTEGER_CST)
6750 return pedantic_non_lvalue
6751 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6752 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6753 return pedantic_omit_one_operand (type, arg1, arg0);
6755 /* If the second operand is zero, invert the comparison and swap
6756 the second and third operands. Likewise if the second operand
6757 is constant and the third is not or if the third operand is
6758 equivalent to the first operand of the comparison. */
6760 if (integer_zerop (arg1)
6761 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6762 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6763 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6764 TREE_OPERAND (t, 2),
6765 TREE_OPERAND (arg0, 1))))
6767 /* See if this can be inverted. If it can't, possibly because
6768 it was a floating-point inequality comparison, don't do
6769 anything. */
6770 tem = invert_truthvalue (arg0);
6772 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6774 t = build (code, type, tem,
6775 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6776 arg0 = tem;
6777 /* arg1 should be the first argument of the new T. */
6778 arg1 = TREE_OPERAND (t, 1);
6779 STRIP_NOPS (arg1);
6783 /* If we have A op B ? A : C, we may be able to convert this to a
6784 simpler expression, depending on the operation and the values
6785 of B and C. Signed zeros prevent all of these transformations,
6786 for reasons given above each one. */
6788 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6789 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6790 arg1, TREE_OPERAND (arg0, 1))
6791 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6793 tree arg2 = TREE_OPERAND (t, 2);
6794 enum tree_code comp_code = TREE_CODE (arg0);
6796 STRIP_NOPS (arg2);
6798 /* If we have A op 0 ? A : -A, consider applying the following
6799 transformations:
6801 A == 0? A : -A same as -A
6802 A != 0? A : -A same as A
6803 A >= 0? A : -A same as abs (A)
6804 A > 0? A : -A same as abs (A)
6805 A <= 0? A : -A same as -abs (A)
6806 A < 0? A : -A same as -abs (A)
6808 None of these transformations work for modes with signed
6809 zeros. If A is +/-0, the first two transformations will
6810 change the sign of the result (from +0 to -0, or vice
6811 versa). The last four will fix the sign of the result,
6812 even though the original expressions could be positive or
6813 negative, depending on the sign of A.
6815 Note that all these transformations are correct if A is
6816 NaN, since the two alternatives (A and -A) are also NaNs. */
6817 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6818 ? real_zerop (TREE_OPERAND (arg0, 1))
6819 : integer_zerop (TREE_OPERAND (arg0, 1)))
6820 && TREE_CODE (arg2) == NEGATE_EXPR
6821 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6822 switch (comp_code)
6824 case EQ_EXPR:
6825 return
6826 pedantic_non_lvalue
6827 (convert (type,
6828 negate_expr
6829 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6830 arg1))));
6831 case NE_EXPR:
6832 return pedantic_non_lvalue (convert (type, arg1));
6833 case GE_EXPR:
6834 case GT_EXPR:
6835 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6836 arg1 = convert ((*lang_hooks.types.signed_type)
6837 (TREE_TYPE (arg1)), arg1);
6838 return pedantic_non_lvalue
6839 (convert (type, fold (build1 (ABS_EXPR,
6840 TREE_TYPE (arg1), arg1))));
6841 case LE_EXPR:
6842 case LT_EXPR:
6843 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6844 arg1 = convert ((*lang_hooks.types.signed_type)  /* explicit deref for consistency with the GE/GT arm and the rest of this file */
6845 (TREE_TYPE (arg1)), arg1);
6846 return pedantic_non_lvalue
6847 (negate_expr (convert (type,
6848 fold (build1 (ABS_EXPR,
6849 TREE_TYPE (arg1),
6850 arg1)))));
6851 default:
6852 abort ();
6855 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6856 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6857 both transformations are correct when A is NaN: A != 0
6858 is then true, and A == 0 is false. */
6860 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6862 if (comp_code == NE_EXPR)
6863 return pedantic_non_lvalue (convert (type, arg1));
6864 else if (comp_code == EQ_EXPR)
6865 return pedantic_non_lvalue (convert (type, integer_zero_node));
6868 /* Try some transformations of A op B ? A : B.
6870 A == B? A : B same as B
6871 A != B? A : B same as A
6872 A >= B? A : B same as max (A, B)
6873 A > B? A : B same as max (B, A)
6874 A <= B? A : B same as min (A, B)
6875 A < B? A : B same as min (B, A)
6877 As above, these transformations don't work in the presence
6878 of signed zeros. For example, if A and B are zeros of
6879 opposite sign, the first two transformations will change
6880 the sign of the result. In the last four, the original
6881 expressions give different results for (A=+0, B=-0) and
6882 (A=-0, B=+0), but the transformed expressions do not.
6884 The first two transformations are correct if either A or B
6885 is a NaN. In the first transformation, the condition will
6886 be false, and B will indeed be chosen. In the case of the
6887 second transformation, the condition A != B will be true,
6888 and A will be chosen.
6890 The conversions to max() and min() are not correct if B is
6891 a number and A is not. The conditions in the original
6892 expressions will be false, so all four give B. The min()
6893 and max() versions would give a NaN instead. */
6894 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6895 arg2, TREE_OPERAND (arg0, 0)))
6897 tree comp_op0 = TREE_OPERAND (arg0, 0);
6898 tree comp_op1 = TREE_OPERAND (arg0, 1);
6899 tree comp_type = TREE_TYPE (comp_op0);
6901 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6902 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6903 comp_type = type;
6905 switch (comp_code)
6907 case EQ_EXPR:
6908 return pedantic_non_lvalue (convert (type, arg2));
6909 case NE_EXPR:
6910 return pedantic_non_lvalue (convert (type, arg1));
6911 case LE_EXPR:
6912 case LT_EXPR:
6913 /* In C++ a ?: expression can be an lvalue, so put the
6914 operand which will be used if they are equal first
6915 so that we can convert this back to the
6916 corresponding COND_EXPR. */
6917 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6918 return pedantic_non_lvalue
6919 (convert (type, fold (build (MIN_EXPR, comp_type,
6920 (comp_code == LE_EXPR
6921 ? comp_op0 : comp_op1),
6922 (comp_code == LE_EXPR
6923 ? comp_op1 : comp_op0)))));
6924 break;
6925 case GE_EXPR:
6926 case GT_EXPR:
6927 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
6928 return pedantic_non_lvalue
6929 (convert (type, fold (build (MAX_EXPR, comp_type,
6930 (comp_code == GE_EXPR
6931 ? comp_op0 : comp_op1),
6932 (comp_code == GE_EXPR
6933 ? comp_op1 : comp_op0)))));
6934 break;
6935 default:
6936 abort ();
6940 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6941 we might still be able to simplify this. For example,
6942 if C1 is one less or one more than C2, this might have started
6943 out as a MIN or MAX and been transformed by this function.
6944 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6946 if (INTEGRAL_TYPE_P (type)
6947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6948 && TREE_CODE (arg2) == INTEGER_CST)
6949 switch (comp_code)
6951 case EQ_EXPR:
6952 /* We can replace A with C1 in this case. */
6953 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6954 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6955 TREE_OPERAND (t, 2));
6956 break;
6958 case LT_EXPR:
6959 /* If C1 is C2 + 1, this is min(A, C2). */
6960 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6961 && operand_equal_p (TREE_OPERAND (arg0, 1),
6962 const_binop (PLUS_EXPR, arg2,
6963 integer_one_node, 0), 1))
6964 return pedantic_non_lvalue
6965 (fold (build (MIN_EXPR, type, arg1, arg2)));
6966 break;
6968 case LE_EXPR:
6969 /* If C1 is C2 - 1, this is min(A, C2). */
6970 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6971 && operand_equal_p (TREE_OPERAND (arg0, 1),
6972 const_binop (MINUS_EXPR, arg2,
6973 integer_one_node, 0), 1))
6974 return pedantic_non_lvalue
6975 (fold (build (MIN_EXPR, type, arg1, arg2)));
6976 break;
6978 case GT_EXPR:
6979 /* If C1 is C2 - 1, this is max(A, C2). */
6980 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6981 && operand_equal_p (TREE_OPERAND (arg0, 1),
6982 const_binop (MINUS_EXPR, arg2,
6983 integer_one_node, 0), 1))
6984 return pedantic_non_lvalue
6985 (fold (build (MAX_EXPR, type, arg1, arg2)));
6986 break;
6988 case GE_EXPR:
6989 /* If C1 is C2 + 1, this is max(A, C2). */
6990 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6991 && operand_equal_p (TREE_OPERAND (arg0, 1),
6992 const_binop (PLUS_EXPR, arg2,
6993 integer_one_node, 0), 1))
6994 return pedantic_non_lvalue
6995 (fold (build (MAX_EXPR, type, arg1, arg2)));
6996 break;
6997 case NE_EXPR:
6998 break;
6999 default:
7000 abort ();
7004 /* If the second operand is simpler than the third, swap them
7005 since that produces better jump optimization results. */
7006 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7007 || TREE_CODE (arg1) == SAVE_EXPR)
7008 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7009 || DECL_P (TREE_OPERAND (t, 2))
7010 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7012 /* See if this can be inverted. If it can't, possibly because
7013 it was a floating-point inequality comparison, don't do
7014 anything. */
7015 tem = invert_truthvalue (arg0);
7017 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7019 t = build (code, type, tem,
7020 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7021 arg0 = tem;
7022 /* arg1 should be the first argument of the new T. */
7023 arg1 = TREE_OPERAND (t, 1);
7024 STRIP_NOPS (arg1);
7028 /* Convert A ? 1 : 0 to simply A. */
7029 if (integer_onep (TREE_OPERAND (t, 1))
7030 && integer_zerop (TREE_OPERAND (t, 2))
7031 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7032 call to fold will try to move the conversion inside
7033 a COND, which will recurse. In that case, the COND_EXPR
7034 is probably the best choice, so leave it alone. */
7035 && type == TREE_TYPE (arg0))
7036 return pedantic_non_lvalue (arg0);
7038 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7039 over COND_EXPR in cases such as floating point comparisons. */
7040 if (integer_zerop (TREE_OPERAND (t, 1))
7041 && integer_onep (TREE_OPERAND (t, 2))
7042 && truth_value_p (TREE_CODE (arg0)))
7043 return pedantic_non_lvalue (convert (type,
7044 invert_truthvalue (arg0)));
7046 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7047 operation is simply A & 2. */
7049 if (integer_zerop (TREE_OPERAND (t, 2))
7050 && TREE_CODE (arg0) == NE_EXPR
7051 && integer_zerop (TREE_OPERAND (arg0, 1))
7052 && integer_pow2p (arg1)
7053 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7055 arg1, 1))
7056 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7058 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7059 if (integer_zerop (TREE_OPERAND (t, 2))
7060 && truth_value_p (TREE_CODE (arg0))
7061 && truth_value_p (TREE_CODE (arg1)))
7062 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7063 arg0, arg1)));
7065 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7066 if (integer_onep (TREE_OPERAND (t, 2))
7067 && truth_value_p (TREE_CODE (arg0))
7068 && truth_value_p (TREE_CODE (arg1)))
7070 /* Only perform transformation if ARG0 is easily inverted. */
7071 tem = invert_truthvalue (arg0);
7072 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7073 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7074 tem, arg1)));
7077 return t;
7079 case COMPOUND_EXPR:
7080 /* When pedantic, a compound expression can be neither an lvalue
7081 nor an integer constant expression. */
7082 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7083 return t;
7084 /* Don't let (0, 0) be null pointer constant. */
7085 if (integer_zerop (arg1))
7086 return build1 (NOP_EXPR, type, arg1);
7087 return convert (type, arg1);
7089 case COMPLEX_EXPR:
7090 if (wins)
7091 return build_complex (type, arg0, arg1);
7092 return t;
7094 case REALPART_EXPR:
7095 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7096 return t;
7097 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7098 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7099 TREE_OPERAND (arg0, 1));
7100 else if (TREE_CODE (arg0) == COMPLEX_CST)
7101 return TREE_REALPART (arg0);
7102 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7103 return fold (build (TREE_CODE (arg0), type,
7104 fold (build1 (REALPART_EXPR, type,
7105 TREE_OPERAND (arg0, 0))),
7106 fold (build1 (REALPART_EXPR,
7107 type, TREE_OPERAND (arg0, 1)))));
7108 return t;
7110 case IMAGPART_EXPR:
7111 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7112 return convert (type, integer_zero_node);
7113 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7114 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7115 TREE_OPERAND (arg0, 0));
7116 else if (TREE_CODE (arg0) == COMPLEX_CST)
7117 return TREE_IMAGPART (arg0);
7118 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7119 return fold (build (TREE_CODE (arg0), type,
7120 fold (build1 (IMAGPART_EXPR, type,
7121 TREE_OPERAND (arg0, 0))),
7122 fold (build1 (IMAGPART_EXPR, type,
7123 TREE_OPERAND (arg0, 1)))));
7124 return t;
7126 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7127 appropriate. */
7128 case CLEANUP_POINT_EXPR:
7129 if (! has_cleanups (arg0))
7130 return TREE_OPERAND (t, 0);
7133 enum tree_code code0 = TREE_CODE (arg0);
7134 int kind0 = TREE_CODE_CLASS (code0);
7135 tree arg00 = TREE_OPERAND (arg0, 0);
7136 tree arg01;
7138 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7139 return fold (build1 (code0, type,
7140 fold (build1 (CLEANUP_POINT_EXPR,
7141 TREE_TYPE (arg00), arg00))));
7143 if (kind0 == '<' || kind0 == '2'
7144 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7145 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7146 || code0 == TRUTH_XOR_EXPR)
7148 arg01 = TREE_OPERAND (arg0, 1);
7150 if (TREE_CONSTANT (arg00)
7151 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7152 && ! has_cleanups (arg00)))
7153 return fold (build (code0, type, arg00,
7154 fold (build1 (CLEANUP_POINT_EXPR,
7155 TREE_TYPE (arg01), arg01))));
7157 if (TREE_CONSTANT (arg01))
7158 return fold (build (code0, type,
7159 fold (build1 (CLEANUP_POINT_EXPR,
7160 TREE_TYPE (arg00), arg00)),
7161 arg01));
7164 return t;
7167 case CALL_EXPR:
7168 /* Check for a built-in function. */
7169 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7170 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7171 == FUNCTION_DECL)
7172 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7174 tree tmp = fold_builtin (expr);
7175 if (tmp)
7176 return tmp;
7178 return t;
7180 default:
7181 return t;
7182 } /* switch (code) */
7185 /* Determine if first argument is a multiple of second argument. Return 0 if
it is not, or we cannot easily determine that it is.
7188 An example of the sort of thing we care about (at this point; this routine
7189 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7190 fold cases do now) is discovering that
7192 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7194 is a multiple of
7196 SAVE_EXPR (J * 8)
7198 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7200 This code also handles discovering that
7202 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7204 is a multiple of 8 so we don't have to worry about dealing with a
7205 possible remainder.
7207 Note that we *look* inside a SAVE_EXPR only to determine how it was
7208 calculated; it is not safe for fold to do much of anything else with the
7209 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7210 at run time. For example, the latter example above *cannot* be implemented
7211 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7212 evaluation time of the original SAVE_EXPR is not necessarily the same at
7213 the time the new expression is evaluated. The only optimization of this
7214 sort that would be valid is changing
7216 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7218 divided by 8 to
7220 SAVE_EXPR (I) * SAVE_EXPR (J)
7222 (where the same SAVE_EXPR (J) is used in the original and the
7223 transformed version). */
7225 static int
7226 multiple_of_p (type, top, bottom)
7227 tree type;
7228 tree top;
7229 tree bottom;
7231 if (operand_equal_p (top, bottom, 0))
7232 return 1;
7234 if (TREE_CODE (type) != INTEGER_TYPE)
7235 return 0;
7237 switch (TREE_CODE (top))
7239 case MULT_EXPR:
7240 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7241 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7243 case PLUS_EXPR:
7244 case MINUS_EXPR:
7245 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7246 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7248 case LSHIFT_EXPR:
7249 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7251 tree op1, t1;
7253 op1 = TREE_OPERAND (top, 1);
7254 /* const_binop may not detect overflow correctly,
7255 so check for it explicitly here. */
7256 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7257 > TREE_INT_CST_LOW (op1)
7258 && TREE_INT_CST_HIGH (op1) == 0
7259 && 0 != (t1 = convert (type,
7260 const_binop (LSHIFT_EXPR, size_one_node,
7261 op1, 0)))
7262 && ! TREE_OVERFLOW (t1))
7263 return multiple_of_p (type, t1, bottom);
7265 return 0;
7267 case NOP_EXPR:
7268 /* Can't handle conversions from non-integral or wider integral type. */
7269 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7270 || (TYPE_PRECISION (type)
7271 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7272 return 0;
7274 /* .. fall through ... */
7276 case SAVE_EXPR:
7277 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7279 case INTEGER_CST:
7280 if (TREE_CODE (bottom) != INTEGER_CST
7281 || (TREE_UNSIGNED (type)
7282 && (tree_int_cst_sgn (top) < 0
7283 || tree_int_cst_sgn (bottom) < 0)))
7284 return 0;
7285 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7286 top, bottom, 0));
7288 default:
7289 return 0;
7293 /* Return true if `t' is known to be non-negative. */
7296 tree_expr_nonnegative_p (t)
7297 tree t;
7299 switch (TREE_CODE (t))
7301 case ABS_EXPR:
7302 case FFS_EXPR:
7303 return 1;
7304 case INTEGER_CST:
7305 return tree_int_cst_sgn (t) >= 0;
7306 case TRUNC_DIV_EXPR:
7307 case CEIL_DIV_EXPR:
7308 case FLOOR_DIV_EXPR:
7309 case ROUND_DIV_EXPR:
7310 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7311 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7312 case TRUNC_MOD_EXPR:
7313 case CEIL_MOD_EXPR:
7314 case FLOOR_MOD_EXPR:
7315 case ROUND_MOD_EXPR:
7316 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7317 case COND_EXPR:
7318 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7319 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7320 case COMPOUND_EXPR:
7321 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7322 case MIN_EXPR:
7323 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7324 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7325 case MAX_EXPR:
7326 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7327 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7328 case MODIFY_EXPR:
7329 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7330 case BIND_EXPR:
7331 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7332 case SAVE_EXPR:
7333 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7334 case NON_LVALUE_EXPR:
7335 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7336 case RTL_EXPR:
7337 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7339 default:
7340 if (truth_value_p (TREE_CODE (t)))
7341 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7342 return 1;
7343 else
7344 /* We don't know sign of `t', so be conservative and return false. */
7345 return 0;
7349 /* Return true if `r' is known to be non-negative.
7350 Only handles constants at the moment. */
7353 rtl_expr_nonnegative_p (r)
7354 rtx r;
7356 switch (GET_CODE (r))
7358 case CONST_INT:
7359 return INTVAL (r) >= 0;
7361 case CONST_DOUBLE:
7362 if (GET_MODE (r) == VOIDmode)
7363 return CONST_DOUBLE_HIGH (r) >= 0;
7364 return 0;
7366 case CONST_VECTOR:
7368 int units, i;
7369 rtx elt;
7371 units = CONST_VECTOR_NUNITS (r);
7373 for (i = 0; i < units; ++i)
7375 elt = CONST_VECTOR_ELT (r, i);
7376 if (!rtl_expr_nonnegative_p (elt))
7377 return 0;
7380 return 1;
7383 case SYMBOL_REF:
7384 case LABEL_REF:
7385 /* These are always nonnegative. */
7386 return 1;
7388 default:
7389 return 0;
7393 #include "gt-fold-const.h"