configure.in (GLIBCPP_ENABLE_CXX_FLAGS): Do not pass arguments, let the defaults...
[official-gcc.git] / gcc / fold-const.c
blob1454d414f0991608fe867216220189e7368eff1c
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
60 static void encode PARAMS ((HOST_WIDE_INT *,
61 unsigned HOST_WIDE_INT,
62 HOST_WIDE_INT));
63 static void decode PARAMS ((HOST_WIDE_INT *,
64 unsigned HOST_WIDE_INT *,
65 HOST_WIDE_INT *));
66 static tree negate_expr PARAMS ((tree));
67 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
68 tree *, int));
69 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
70 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
71 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
72 static hashval_t size_htab_hash PARAMS ((const void *));
73 static int size_htab_eq PARAMS ((const void *, const void *));
74 static tree fold_convert PARAMS ((tree, tree));
75 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
76 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
77 static int comparison_to_compcode PARAMS ((enum tree_code));
78 static enum tree_code compcode_to_comparison PARAMS ((int));
79 static int truth_value_p PARAMS ((enum tree_code));
80 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
81 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
82 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
83 static tree omit_one_operand PARAMS ((tree, tree, tree));
84 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
85 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
86 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
87 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
88 tree, tree));
89 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
90 HOST_WIDE_INT *,
91 enum machine_mode *, int *,
92 int *, tree *, tree *));
93 static int all_ones_mask_p PARAMS ((tree, int));
94 static tree sign_bit_p PARAMS ((tree, tree));
95 static int simple_operand_p PARAMS ((tree));
96 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
97 tree, int));
98 static tree make_range PARAMS ((tree, int *, tree *, tree *));
99 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
100 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
101 int, tree, tree));
102 static tree fold_range_test PARAMS ((tree));
103 static tree unextend PARAMS ((tree, int, int, tree));
104 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
105 static tree optimize_minmax_comparison PARAMS ((tree));
106 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
107 static tree strip_compound_expr PARAMS ((tree, tree));
108 static int multiple_of_p PARAMS ((tree, tree, tree));
109 static tree constant_boolean_node PARAMS ((int, tree));
110 static int count_cond PARAMS ((tree, int));
111 static tree fold_binary_op_with_conditional_arg
112 PARAMS ((enum tree_code, tree, tree, tree, int));
113 static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   The encoding is: bit 0 = "less than", bit 1 = "equal",
   bit 2 = "greater than", so e.g. LE == LT|EQ and TRUE == all bits.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
148 /* Unpack a two-word integer into 4 words.
149 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
150 WORDS points to the array of HOST_WIDE_INTs. */
152 static void
153 encode (words, low, hi)
154 HOST_WIDE_INT *words;
155 unsigned HOST_WIDE_INT low;
156 HOST_WIDE_INT hi;
158 words[0] = LOWPART (low);
159 words[1] = HIGHPART (low);
160 words[2] = LOWPART (hi);
161 words[3] = HIGHPART (hi);
164 /* Pack an array of 4 words into a two-word integer.
165 WORDS points to the array of words.
166 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
168 static void
169 decode (words, low, hi)
170 HOST_WIDE_INT *words;
171 unsigned HOST_WIDE_INT *low;
172 HOST_WIDE_INT *hi;
174 *low = words[0] + words[1] * BASE;
175 *hi = words[2] + words[3] * BASE;
178 /* Make the integer constant T valid for its type by setting to 0 or 1 all
179 the bits in the constant that don't belong in the type.
181 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
182 nonzero, a signed overflow has already occurred in calculating T, so
183 propagate it. */
186 force_fit_type (t, overflow)
187 tree t;
188 int overflow;
190 unsigned HOST_WIDE_INT low;
191 HOST_WIDE_INT high;
192 unsigned int prec;
194 if (TREE_CODE (t) == REAL_CST)
196 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
197 Consider doing it via real_convert now. */
198 return overflow;
201 else if (TREE_CODE (t) != INTEGER_CST)
202 return overflow;
204 low = TREE_INT_CST_LOW (t);
205 high = TREE_INT_CST_HIGH (t);
207 if (POINTER_TYPE_P (TREE_TYPE (t)))
208 prec = POINTER_SIZE;
209 else
210 prec = TYPE_PRECISION (TREE_TYPE (t));
212 /* First clear all bits that are beyond the type's precision. */
214 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
216 else if (prec > HOST_BITS_PER_WIDE_INT)
217 TREE_INT_CST_HIGH (t)
218 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
219 else
221 TREE_INT_CST_HIGH (t) = 0;
222 if (prec < HOST_BITS_PER_WIDE_INT)
223 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
226 /* Unsigned types do not suffer sign extension or overflow unless they
227 are a sizetype. */
228 if (TREE_UNSIGNED (TREE_TYPE (t))
229 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
230 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
231 return overflow;
233 /* If the value's sign bit is set, extend the sign. */
234 if (prec != 2 * HOST_BITS_PER_WIDE_INT
235 && (prec > HOST_BITS_PER_WIDE_INT
236 ? 0 != (TREE_INT_CST_HIGH (t)
237 & ((HOST_WIDE_INT) 1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 : 0 != (TREE_INT_CST_LOW (t)
240 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
242 /* Value is negative:
243 set to 1 all the bits that are outside this type's precision. */
244 if (prec > HOST_BITS_PER_WIDE_INT)
245 TREE_INT_CST_HIGH (t)
246 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
247 else
249 TREE_INT_CST_HIGH (t) = -1;
250 if (prec < HOST_BITS_PER_WIDE_INT)
251 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
255 /* Return nonzero if signed overflow occurred. */
256 return
257 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
258 != 0);
261 /* Add two doubleword integers with doubleword result.
262 Each argument is given as two `HOST_WIDE_INT' pieces.
263 One argument is L1 and H1; the other, L2 and H2.
264 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
267 add_double (l1, h1, l2, h2, lv, hv)
268 unsigned HOST_WIDE_INT l1, l2;
269 HOST_WIDE_INT h1, h2;
270 unsigned HOST_WIDE_INT *lv;
271 HOST_WIDE_INT *hv;
273 unsigned HOST_WIDE_INT l;
274 HOST_WIDE_INT h;
276 l = l1 + l2;
277 h = h1 + h2 + (l < l1);
279 *lv = l;
280 *hv = h;
281 return OVERFLOW_SUM_SIGN (h1, h2, h);
284 /* Negate a doubleword integer with doubleword result.
285 Return nonzero if the operation overflows, assuming it's signed.
286 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
290 neg_double (l1, h1, lv, hv)
291 unsigned HOST_WIDE_INT l1;
292 HOST_WIDE_INT h1;
293 unsigned HOST_WIDE_INT *lv;
294 HOST_WIDE_INT *hv;
296 if (l1 == 0)
298 *lv = 0;
299 *hv = - h1;
300 return (*hv & h1) < 0;
302 else
304 *lv = -l1;
305 *hv = ~h1;
306 return 0;
310 /* Multiply two doubleword integers with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 Each argument is given as two `HOST_WIDE_INT' pieces.
313 One argument is L1 and H1; the other, L2 and H2.
314 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
317 mul_double (l1, h1, l2, h2, lv, hv)
318 unsigned HOST_WIDE_INT l1, l2;
319 HOST_WIDE_INT h1, h2;
320 unsigned HOST_WIDE_INT *lv;
321 HOST_WIDE_INT *hv;
323 HOST_WIDE_INT arg1[4];
324 HOST_WIDE_INT arg2[4];
325 HOST_WIDE_INT prod[4 * 2];
326 unsigned HOST_WIDE_INT carry;
327 int i, j, k;
328 unsigned HOST_WIDE_INT toplow, neglow;
329 HOST_WIDE_INT tophigh, neghigh;
331 encode (arg1, l1, h1);
332 encode (arg2, l2, h2);
334 memset ((char *) prod, 0, sizeof prod);
336 for (i = 0; i < 4; i++)
338 carry = 0;
339 for (j = 0; j < 4; j++)
341 k = i + j;
342 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
343 carry += arg1[i] * arg2[j];
344 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
345 carry += prod[k];
346 prod[k] = LOWPART (carry);
347 carry = HIGHPART (carry);
349 prod[i + 4] = carry;
352 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
354 /* Check for overflow by calculating the top half of the answer in full;
355 it should agree with the low half's sign bit. */
356 decode (prod + 4, &toplow, &tophigh);
357 if (h1 < 0)
359 neg_double (l2, h2, &neglow, &neghigh);
360 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 if (h2 < 0)
364 neg_double (l1, h1, &neglow, &neghigh);
365 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
367 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
370 /* Shift the doubleword integer in L1, H1 left by COUNT places
371 keeping only PREC bits of result.
372 Shift right if COUNT is negative.
373 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
374 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
376 void
377 lshift_double (l1, h1, count, prec, lv, hv, arith)
378 unsigned HOST_WIDE_INT l1;
379 HOST_WIDE_INT h1, count;
380 unsigned int prec;
381 unsigned HOST_WIDE_INT *lv;
382 HOST_WIDE_INT *hv;
383 int arith;
385 unsigned HOST_WIDE_INT signmask;
387 if (count < 0)
389 rshift_double (l1, h1, -count, prec, lv, hv, arith);
390 return;
393 #ifdef SHIFT_COUNT_TRUNCATED
394 if (SHIFT_COUNT_TRUNCATED)
395 count %= prec;
396 #endif
398 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
400 /* Shifting by the host word size is undefined according to the
401 ANSI standard, so we must handle this as a special case. */
402 *hv = 0;
403 *lv = 0;
405 else if (count >= HOST_BITS_PER_WIDE_INT)
407 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
408 *lv = 0;
410 else
412 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
413 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
414 *lv = l1 << count;
417 /* Sign extend all bits that are beyond the precision. */
419 signmask = -((prec > HOST_BITS_PER_WIDE_INT
420 ? ((unsigned HOST_WIDE_INT) *hv
421 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
422 : (*lv >> (prec - 1))) & 1);
424 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
426 else if (prec >= HOST_BITS_PER_WIDE_INT)
428 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
429 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
431 else
433 *hv = signmask;
434 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
435 *lv |= signmask << prec;
439 /* Shift the doubleword integer in L1, H1 right by COUNT places
440 keeping only PREC bits of result. COUNT must be positive.
441 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
442 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
444 void
445 rshift_double (l1, h1, count, prec, lv, hv, arith)
446 unsigned HOST_WIDE_INT l1;
447 HOST_WIDE_INT h1, count;
448 unsigned int prec;
449 unsigned HOST_WIDE_INT *lv;
450 HOST_WIDE_INT *hv;
451 int arith;
453 unsigned HOST_WIDE_INT signmask;
455 signmask = (arith
456 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
457 : 0);
459 #ifdef SHIFT_COUNT_TRUNCATED
460 if (SHIFT_COUNT_TRUNCATED)
461 count %= prec;
462 #endif
464 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
466 /* Shifting by the host word size is undefined according to the
467 ANSI standard, so we must handle this as a special case. */
468 *hv = 0;
469 *lv = 0;
471 else if (count >= HOST_BITS_PER_WIDE_INT)
473 *hv = 0;
474 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
476 else
478 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
479 *lv = ((l1 >> count)
480 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
483 /* Zero / sign extend all bits that are beyond the precision. */
485 if (count >= (HOST_WIDE_INT)prec)
487 *hv = signmask;
488 *lv = signmask;
490 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
492 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
494 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
495 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
497 else
499 *hv = signmask;
500 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
501 *lv |= signmask << (prec - count);
505 /* Rotate the doubleword integer in L1, H1 left by COUNT places
506 keeping only PREC bits of result.
507 Rotate right if COUNT is negative.
508 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
510 void
511 lrotate_double (l1, h1, count, prec, lv, hv)
512 unsigned HOST_WIDE_INT l1;
513 HOST_WIDE_INT h1, count;
514 unsigned int prec;
515 unsigned HOST_WIDE_INT *lv;
516 HOST_WIDE_INT *hv;
518 unsigned HOST_WIDE_INT s1l, s2l;
519 HOST_WIDE_INT s1h, s2h;
521 count %= prec;
522 if (count < 0)
523 count += prec;
525 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
527 *lv = s1l | s2l;
528 *hv = s1h | s2h;
531 /* Rotate the doubleword integer in L1, H1 left by COUNT places
532 keeping only PREC bits of result. COUNT must be positive.
533 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
535 void
536 rrotate_double (l1, h1, count, prec, lv, hv)
537 unsigned HOST_WIDE_INT l1;
538 HOST_WIDE_INT h1, count;
539 unsigned int prec;
540 unsigned HOST_WIDE_INT *lv;
541 HOST_WIDE_INT *hv;
543 unsigned HOST_WIDE_INT s1l, s2l;
544 HOST_WIDE_INT s1h, s2h;
546 count %= prec;
547 if (count < 0)
548 count += prec;
550 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
551 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
552 *lv = s1l | s2l;
553 *hv = s1h | s2h;
556 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
557 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
558 CODE is a tree code for a kind of division, one of
559 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
560 or EXACT_DIV_EXPR
561 It controls how the quotient is rounded to an integer.
562 Return nonzero if the operation overflows.
563 UNS nonzero says do unsigned division. */
566 div_and_round_double (code, uns,
567 lnum_orig, hnum_orig, lden_orig, hden_orig,
568 lquo, hquo, lrem, hrem)
569 enum tree_code code;
570 int uns;
571 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig;
573 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig;
575 unsigned HOST_WIDE_INT *lquo, *lrem;
576 HOST_WIDE_INT *hquo, *hrem;
578 int quo_neg = 0;
579 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
580 HOST_WIDE_INT den[4], quo[4];
581 int i, j;
582 unsigned HOST_WIDE_INT work;
583 unsigned HOST_WIDE_INT carry = 0;
584 unsigned HOST_WIDE_INT lnum = lnum_orig;
585 HOST_WIDE_INT hnum = hnum_orig;
586 unsigned HOST_WIDE_INT lden = lden_orig;
587 HOST_WIDE_INT hden = hden_orig;
588 int overflow = 0;
590 if (hden == 0 && lden == 0)
591 overflow = 1, lden = 1;
593 /* calculate quotient sign and convert operands to unsigned. */
594 if (!uns)
596 if (hnum < 0)
598 quo_neg = ~ quo_neg;
599 /* (minimum integer) / (-1) is the only overflow case. */
600 if (neg_double (lnum, hnum, &lnum, &hnum)
601 && ((HOST_WIDE_INT) lden & hden) == -1)
602 overflow = 1;
604 if (hden < 0)
606 quo_neg = ~ quo_neg;
607 neg_double (lden, hden, &lden, &hden);
611 if (hnum == 0 && hden == 0)
612 { /* single precision */
613 *hquo = *hrem = 0;
614 /* This unsigned division rounds toward zero. */
615 *lquo = lnum / lden;
616 goto finish_up;
619 if (hnum == 0)
620 { /* trivial case: dividend < divisor */
621 /* hden != 0 already checked. */
622 *hquo = *lquo = 0;
623 *hrem = hnum;
624 *lrem = lnum;
625 goto finish_up;
628 memset ((char *) quo, 0, sizeof quo);
630 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
631 memset ((char *) den, 0, sizeof den);
633 encode (num, lnum, hnum);
634 encode (den, lden, hden);
636 /* Special code for when the divisor < BASE. */
637 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
639 /* hnum != 0 already checked. */
640 for (i = 4 - 1; i >= 0; i--)
642 work = num[i] + carry * BASE;
643 quo[i] = work / lden;
644 carry = work % lden;
647 else
649 /* Full double precision division,
650 with thanks to Don Knuth's "Seminumerical Algorithms". */
651 int num_hi_sig, den_hi_sig;
652 unsigned HOST_WIDE_INT quo_est, scale;
654 /* Find the highest nonzero divisor digit. */
655 for (i = 4 - 1;; i--)
656 if (den[i] != 0)
658 den_hi_sig = i;
659 break;
662 /* Insure that the first digit of the divisor is at least BASE/2.
663 This is required by the quotient digit estimation algorithm. */
665 scale = BASE / (den[den_hi_sig] + 1);
666 if (scale > 1)
667 { /* scale divisor and dividend */
668 carry = 0;
669 for (i = 0; i <= 4 - 1; i++)
671 work = (num[i] * scale) + carry;
672 num[i] = LOWPART (work);
673 carry = HIGHPART (work);
676 num[4] = carry;
677 carry = 0;
678 for (i = 0; i <= 4 - 1; i++)
680 work = (den[i] * scale) + carry;
681 den[i] = LOWPART (work);
682 carry = HIGHPART (work);
683 if (den[i] != 0) den_hi_sig = i;
687 num_hi_sig = 4;
689 /* Main loop */
690 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
692 /* Guess the next quotient digit, quo_est, by dividing the first
693 two remaining dividend digits by the high order quotient digit.
694 quo_est is never low and is at most 2 high. */
695 unsigned HOST_WIDE_INT tmp;
697 num_hi_sig = i + den_hi_sig + 1;
698 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
699 if (num[num_hi_sig] != den[den_hi_sig])
700 quo_est = work / den[den_hi_sig];
701 else
702 quo_est = BASE - 1;
704 /* Refine quo_est so it's usually correct, and at most one high. */
705 tmp = work - quo_est * den[den_hi_sig];
706 if (tmp < BASE
707 && (den[den_hi_sig - 1] * quo_est
708 > (tmp * BASE + num[num_hi_sig - 2])))
709 quo_est--;
711 /* Try QUO_EST as the quotient digit, by multiplying the
712 divisor by QUO_EST and subtracting from the remaining dividend.
713 Keep in mind that QUO_EST is the I - 1st digit. */
715 carry = 0;
716 for (j = 0; j <= den_hi_sig; j++)
718 work = quo_est * den[j] + carry;
719 carry = HIGHPART (work);
720 work = num[i + j] - LOWPART (work);
721 num[i + j] = LOWPART (work);
722 carry += HIGHPART (work) != 0;
725 /* If quo_est was high by one, then num[i] went negative and
726 we need to correct things. */
727 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
729 quo_est--;
730 carry = 0; /* add divisor back in */
731 for (j = 0; j <= den_hi_sig; j++)
733 work = num[i + j] + den[j] + carry;
734 carry = HIGHPART (work);
735 num[i + j] = LOWPART (work);
738 num [num_hi_sig] += carry;
741 /* Store the quotient digit. */
742 quo[i] = quo_est;
746 decode (quo, lquo, hquo);
748 finish_up:
749 /* if result is negative, make it so. */
750 if (quo_neg)
751 neg_double (*lquo, *hquo, lquo, hquo);
753 /* compute trial remainder: rem = num - (quo * den) */
754 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
755 neg_double (*lrem, *hrem, lrem, hrem);
756 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
758 switch (code)
760 case TRUNC_DIV_EXPR:
761 case TRUNC_MOD_EXPR: /* round toward zero */
762 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 return overflow;
765 case FLOOR_DIV_EXPR:
766 case FLOOR_MOD_EXPR: /* round toward negative infinity */
767 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
769 /* quo = quo - 1; */
770 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
771 lquo, hquo);
773 else
774 return overflow;
775 break;
777 case CEIL_DIV_EXPR:
778 case CEIL_MOD_EXPR: /* round toward positive infinity */
779 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
781 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
782 lquo, hquo);
784 else
785 return overflow;
786 break;
788 case ROUND_DIV_EXPR:
789 case ROUND_MOD_EXPR: /* round to closest integer */
791 unsigned HOST_WIDE_INT labs_rem = *lrem;
792 HOST_WIDE_INT habs_rem = *hrem;
793 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
794 HOST_WIDE_INT habs_den = hden, htwice;
796 /* Get absolute values */
797 if (*hrem < 0)
798 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
799 if (hden < 0)
800 neg_double (lden, hden, &labs_den, &habs_den);
802 /* If (2 * abs (lrem) >= abs (lden)) */
803 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
804 labs_rem, habs_rem, &ltwice, &htwice);
806 if (((unsigned HOST_WIDE_INT) habs_den
807 < (unsigned HOST_WIDE_INT) htwice)
808 || (((unsigned HOST_WIDE_INT) habs_den
809 == (unsigned HOST_WIDE_INT) htwice)
810 && (labs_den < ltwice)))
812 if (*hquo < 0)
813 /* quo = quo - 1; */
814 add_double (*lquo, *hquo,
815 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
816 else
817 /* quo = quo + 1; */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
819 lquo, hquo);
821 else
822 return overflow;
824 break;
826 default:
827 abort ();
830 /* compute true remainder: rem = num - (quo * den) */
831 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
832 neg_double (*lrem, *hrem, lrem, hrem);
833 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 return overflow;
837 /* Given T, an expression, return the negation of T. Allow for T to be
838 null, in which case return null. */
840 static tree
841 negate_expr (t)
842 tree t;
844 tree type;
845 tree tem;
847 if (t == 0)
848 return 0;
850 type = TREE_TYPE (t);
851 STRIP_SIGN_NOPS (t);
853 switch (TREE_CODE (t))
855 case INTEGER_CST:
856 case REAL_CST:
857 if (! TREE_UNSIGNED (type)
858 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
859 && ! TREE_OVERFLOW (tem))
860 return tem;
861 break;
863 case NEGATE_EXPR:
864 return convert (type, TREE_OPERAND (t, 0));
866 case MINUS_EXPR:
867 /* - (A - B) -> B - A */
868 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
869 return convert (type,
870 fold (build (MINUS_EXPR, TREE_TYPE (t),
871 TREE_OPERAND (t, 1),
872 TREE_OPERAND (t, 0))));
873 break;
875 default:
876 break;
879 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
882 /* Split a tree IN into a constant, literal and variable parts that could be
883 combined with CODE to make IN. "constant" means an expression with
884 TREE_CONSTANT but that isn't an actual constant. CODE must be a
885 commutative arithmetic operation. Store the constant part into *CONP,
886 the literal in *LITP and return the variable part. If a part isn't
887 present, set it to null. If the tree does not decompose in this way,
888 return the entire tree as the variable part and the other parts as null.
890 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
891 case, we negate an operand that was subtracted. Except if it is a
892 literal for which we use *MINUS_LITP instead.
894 If NEGATE_P is true, we are negating all of IN, again except a literal
895 for which we use *MINUS_LITP instead.
897 If IN is itself a literal or constant, return it as appropriate.
899 Note that we do not guarantee that any of the three values will be the
900 same type as IN, but they will have the same signedness and mode. */
902 static tree
903 split_tree (in, code, conp, litp, minus_litp, negate_p)
904 tree in;
905 enum tree_code code;
906 tree *conp, *litp, *minus_litp;
907 int negate_p;
909 tree var = 0;
911 *conp = 0;
912 *litp = 0;
913 *minus_litp = 0;
915 /* Strip any conversions that don't change the machine mode or signedness. */
916 STRIP_SIGN_NOPS (in);
918 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
919 *litp = in;
920 else if (TREE_CODE (in) == code
921 || (! FLOAT_TYPE_P (TREE_TYPE (in))
922 /* We can associate addition and subtraction together (even
923 though the C standard doesn't say so) for integers because
924 the value is not affected. For reals, the value might be
925 affected, so we can't. */
926 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
927 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
929 tree op0 = TREE_OPERAND (in, 0);
930 tree op1 = TREE_OPERAND (in, 1);
931 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
932 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
934 /* First see if either of the operands is a literal, then a constant. */
935 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
936 *litp = op0, op0 = 0;
937 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
938 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
940 if (op0 != 0 && TREE_CONSTANT (op0))
941 *conp = op0, op0 = 0;
942 else if (op1 != 0 && TREE_CONSTANT (op1))
943 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
945 /* If we haven't dealt with either operand, this is not a case we can
946 decompose. Otherwise, VAR is either of the ones remaining, if any. */
947 if (op0 != 0 && op1 != 0)
948 var = in;
949 else if (op0 != 0)
950 var = op0;
951 else
952 var = op1, neg_var_p = neg1_p;
954 /* Now do any needed negations. */
955 if (neg_litp_p)
956 *minus_litp = *litp, *litp = 0;
957 if (neg_conp_p)
958 *conp = negate_expr (*conp);
959 if (neg_var_p)
960 var = negate_expr (var);
962 else if (TREE_CONSTANT (in))
963 *conp = in;
964 else
965 var = in;
967 if (negate_p)
969 if (*litp)
970 *minus_litp = *litp, *litp = 0;
971 else if (*minus_litp)
972 *litp = *minus_litp, *minus_litp = 0;
973 *conp = negate_expr (*conp);
974 var = negate_expr (var);
977 return var;
980 /* Re-associate trees split by the above function. T1 and T2 are either
981 expressions to associate or null. Return the new expression, if any. If
982 we build an operation, do it in TYPE and with CODE. */
984 static tree
985 associate_trees (t1, t2, code, type)
986 tree t1, t2;
987 enum tree_code code;
988 tree type;
990 if (t1 == 0)
991 return t2;
992 else if (t2 == 0)
993 return t1;
995 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
996 try to fold this since we will have infinite recursion. But do
997 deal with any NEGATE_EXPRs. */
998 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
999 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1001 if (code == PLUS_EXPR)
1003 if (TREE_CODE (t1) == NEGATE_EXPR)
1004 return build (MINUS_EXPR, type, convert (type, t2),
1005 convert (type, TREE_OPERAND (t1, 0)));
1006 else if (TREE_CODE (t2) == NEGATE_EXPR)
1007 return build (MINUS_EXPR, type, convert (type, t1),
1008 convert (type, TREE_OPERAND (t2, 0)));
1010 return build (code, type, convert (type, t1), convert (type, t2));
1013 return fold (build (code, type, convert (type, t1), convert (type, t2)));
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Both operands are represented as
   double-word values in (low, high) word pairs.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;	/* Receives the discarded half of a
					   div/mod result.  */
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      /* AND with the complement of the second operand.  */
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtract by adding the negation; detect signed overflow from
	 the operand and result signs.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 are single-word, nonnegative, and did not already overflow.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;	/* Round up before truncating.  */

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* Division by one is the identity.  */
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      /* A nonzero value divided by itself is one.  */
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* For MOD we keep the remainder and discard the quotient.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* Set LOW to 1 iff ARG1 < ARG2, comparing with the signedness
	 of the operands' type.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      /* Pick ARG1 when (ARG1 < ARG2) matches wanting the minimum.  */
      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  /* Overflow is sticky: propagate it from either operand, and signal it
     for signed (or sizetype) arithmetic that wrapped.  */
  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Handles
   INTEGER_CST, REAL_CST and COMPLEX_CST operands; returns 0 for any
   other kind of constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      /* Round the result to the precision of the target mode.  */
      t = build_real (TREE_TYPE (arg1),
		      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
					   value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i)(r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying with the conjugate of the divisor
	       and dividing both parts by |r2 + i2*i|^2.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
1339 /* These are the hash table functions for the hash table of INTEGER_CST
1340 nodes of a sizetype. */
1342 /* Return the hash code code X, an INTEGER_CST. */
1344 static hashval_t
1345 size_htab_hash (x)
1346 const void *x;
1348 tree t = (tree) x;
1350 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1351 ^ htab_hash_pointer (TREE_TYPE (t))
1352 ^ (TREE_OVERFLOW (t) << 20));
1355 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1356 is the same as that given by *Y, which is the same. */
1358 static int
1359 size_htab_eq (x, y)
1360 const void *x;
1361 const void *y;
1363 tree xt = (tree) x;
1364 tree yt = (tree) y;
1366 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1367 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1368 && TREE_TYPE (xt) == TREE_TYPE (yt)
1369 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (number, kind)
     HOST_WIDE_INT number;
     enum size_type_kind kind;
{
  /* Just a convenience wrapper: map KIND to its cached sizetype node.  */
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}
/* Likewise, but the desired type is specified explicitly.  */

/* Scratch INTEGER_CST node reused across calls; it is consumed (becomes
   owned by the hash table) when a lookup misses.  */
static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (number, type)
     HOST_WIDE_INT number;
     tree type;
{
  PTR *slot;

  /* Lazily create the hash table and the scratch node on first use.  */
  if (size_htab == 0)
    {
      size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  /* Sign-extend NUMBER into the high word.  */
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      /* The table now owns NEW_CONST; allocate a fresh scratch node.  */
      *slot = (PTR) new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
1424 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1425 is a tree code. The type of the result is taken from the operands.
1426 Both must be the same type integer type and it must be a size type.
1427 If the operands are constant, so is the result. */
1429 tree
1430 size_binop (code, arg0, arg1)
1431 enum tree_code code;
1432 tree arg0, arg1;
1434 tree type = TREE_TYPE (arg0);
1436 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1437 || type != TREE_TYPE (arg1))
1438 abort ();
1440 /* Handle the special case of two integer constants faster. */
1441 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1443 /* And some specific cases even faster than that. */
1444 if (code == PLUS_EXPR && integer_zerop (arg0))
1445 return arg1;
1446 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1447 && integer_zerop (arg1))
1448 return arg0;
1449 else if (code == MULT_EXPR && integer_onep (arg0))
1450 return arg1;
1452 /* Handle general case of two integer constants. */
1453 return int_const_binop (code, arg0, arg1, 0);
1456 if (arg0 == error_mark_node || arg1 == error_mark_node)
1457 return error_mark_node;
1459 return fold (build (code, type, arg0, arg1));
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (arg0, arg1)
     tree arg0, arg1;
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  /* Both operands must share a single sizetype.  */
  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  /* Select the signed counterpart of the operands' (bit)size type.  */
  ctype = (type == bitsizetype || type == ubitsizetype
	   ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
		       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
		       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.
   Handles integer->integer/pointer, real->integer, integer->real,
   and real->real conversions; other cases return T with only
   TREE_CONSTANT set.  */

static tree
fold_convert (t, arg1)
     tree t;
     tree arg1;
{
  tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* If we are trying to make a sizetype for a small integer, use
	     size_int to pick up cached types to reduce duplicate nodes.  */
	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && !TREE_CONSTANT_OVERFLOW (arg1)
	      && compare_tree_int (arg1, 10000) < 0)
	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				    < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  /* NOTE(review): when the type has no upper bound
	     (no_upper_bound != 0), the conjunction below is always false,
	     so OVERFLOW is set even for values above the lower bound --
	     verify this conservatism is intended.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    overflow = 1;

	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }

	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  /* Round the value to the precision of the target mode.  */
	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
1627 /* Return an expr equal to X but certainly not valid as an lvalue. */
1629 tree
1630 non_lvalue (x)
1631 tree x;
1633 tree result;
1635 /* These things are certainly not lvalues. */
1636 if (TREE_CODE (x) == NON_LVALUE_EXPR
1637 || TREE_CODE (x) == INTEGER_CST
1638 || TREE_CODE (x) == REAL_CST
1639 || TREE_CODE (x) == STRING_CST
1640 || TREE_CODE (x) == ADDR_EXPR)
1641 return x;
1643 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1644 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1645 return result;
1648 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1649 Zero means allow extended lvalues. */
1651 int pedantic_lvalues;
1653 /* When pedantic, return an expr equal to X but certainly not valid as a
1654 pedantic lvalue. Otherwise, return X. */
1656 tree
1657 pedantic_non_lvalue (x)
1658 tree x;
1660 if (pedantic_lvalues)
1661 return non_lvalue (x);
1662 else
1663 return x;
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, because of NaNs.  */

static enum tree_code
invert_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return LE_EXPR;
    case GE_EXPR:
      return LT_EXPR;
    case LT_EXPR:
      return GE_EXPR;
    case LE_EXPR:
      return GT_EXPR;
    default:
      /* Only the six ordinary comparisons can be inverted here.  */
      abort ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point, since swapping operands
   does not change behavior on NaNs.  */

static enum tree_code
swap_tree_comparison (code)
     enum tree_code code;
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      /* Equality and inequality are symmetric.  */
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      abort ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (code)
     enum tree_code code;
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    default:
      /* Only the six ordinary comparisons have compcode encodings.  */
      abort ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (code)
     int code;
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    default:
      /* Combined codes such as COMPCODE_TRUE have no tree_code form.  */
      abort ();
    }
}
1773 /* Return nonzero if CODE is a tree code that represents a truth value. */
1775 static int
1776 truth_value_p (code)
1777 enum tree_code code;
1779 return (TREE_CODE_CLASS (code) == '<'
1780 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1781 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1782 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is nonzero, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Walk both element chains in parallel.  NOTE(review): if the
	     chains have different lengths, the excess elements of the
	     longer one are ignored -- verify callers guarantee equal
	     lengths for same-mode vectors.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (v1, v2, only_const))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			      TREE_STRING_POINTER (arg1),
			      TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are dereferencing
	 contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  /* Object, size, and position must all match.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      if (TREE_CODE (arg0) == RTL_EXPR)
	return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shortening transformation only applies to integral operands.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
			  (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  /* A SAVE_EXPR with no RTL yet and no side effects can be unwrapped;
     record via *SAVE_P that the caller must re-wrap CVAL1/CVAL2.  */
  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* Constants contain no variables; always acceptable.  */
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  The result is re-folded after substitution.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for evaluation.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* fall through - ??? */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
2201 /* Return a tree for the case when the result of an expression is RESULT
2202 converted to TYPE and OMITTED was previously an operand of the expression
2203 but is now not needed (e.g., we folded OMITTED * 0).
2205 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2206 the conversion of RESULT to TYPE. */
2208 static tree
2209 omit_one_operand (type, result, omitted)
2210 tree type, result, omitted;
2212 tree t = convert (type, result);
/* Keep OMITTED for its side effects only; its value is discarded.  */
2214 if (TREE_SIDE_EFFECTS (omitted))
2215 return build (COMPOUND_EXPR, type, omitted, t);
/* non_lvalue prevents the folded result from being used as an lvalue.  */
2217 return non_lvalue (t);
2220 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.
     See omit_one_operand for the underlying semantics.  */
2222 static tree
2223 pedantic_omit_one_operand (type, result, omitted)
2224 tree type, result, omitted;
2226 tree t = convert (type, result);
/* Keep OMITTED for its side effects only; its value is discarded.  */
2228 if (TREE_SIDE_EFFECTS (omitted))
2229 return build (COMPOUND_EXPR, type, omitted, t);
2231 return pedantic_non_lvalue (t);
2234 /* Return a simplified tree node for the truth-negation of ARG. This
2235 never alters ARG itself. We assume that ARG is an operation that
2236 returns a truth value (0 or 1). */
2238 tree
2239 invert_truthvalue (arg)
2240 tree arg;
2242 tree type = TREE_TYPE (arg);
2243 enum tree_code code = TREE_CODE (arg);
2245 if (code == ERROR_MARK)
2246 return arg;
2248 /* If this is a comparison, we can simply invert it, except for
2249 floating-point non-equality comparisons, in which case we just
2250 enclose a TRUTH_NOT_EXPR around what we have.  (Inverting an
2251 ordered FP comparison is not valid with NaNs unless
2252 -funsafe-math-optimizations is in effect.)  */
2252 if (TREE_CODE_CLASS (code) == '<')
2254 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2255 && !flag_unsafe_math_optimizations
2256 && code != NE_EXPR
2257 && code != EQ_EXPR)
2258 return build1 (TRUTH_NOT_EXPR, type, arg);
2259 else
2260 return build (invert_tree_comparison (code), type,
2261 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2264 switch (code)
2266 case INTEGER_CST:
2267 return convert (type, build_int_2 (integer_zerop (arg), 0));
/* De Morgan for the strict and short-circuit boolean operators.  */
2269 case TRUTH_AND_EXPR:
2270 return build (TRUTH_OR_EXPR, type,
2271 invert_truthvalue (TREE_OPERAND (arg, 0)),
2272 invert_truthvalue (TREE_OPERAND (arg, 1)));
2274 case TRUTH_OR_EXPR:
2275 return build (TRUTH_AND_EXPR, type,
2276 invert_truthvalue (TREE_OPERAND (arg, 0)),
2277 invert_truthvalue (TREE_OPERAND (arg, 1)));
2279 case TRUTH_XOR_EXPR:
2280 /* Here we can invert either operand. We invert the first operand
2281 unless the second operand is a TRUTH_NOT_EXPR in which case our
2282 result is the XOR of the first operand with the inside of the
2283 negation of the second operand. */
2285 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2286 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2287 TREE_OPERAND (TREE_OPERAND (arg, 1), 0))
2288 else
2289 return build (TRUTH_XOR_EXPR, type,
2290 invert_truthvalue (TREE_OPERAND (arg, 0)),
2291 TREE_OPERAND (arg, 1));
2293 case TRUTH_ANDIF_EXPR:
2294 return build (TRUTH_ORIF_EXPR, type,
2295 invert_truthvalue (TREE_OPERAND (arg, 0)),
2296 invert_truthvalue (TREE_OPERAND (arg, 1)));
2298 case TRUTH_ORIF_EXPR:
2299 return build (TRUTH_ANDIF_EXPR, type,
2300 invert_truthvalue (TREE_OPERAND (arg, 0)),
2301 invert_truthvalue (TREE_OPERAND (arg, 1)));
2303 case TRUTH_NOT_EXPR:
2304 return TREE_OPERAND (arg, 0);
2306 case COND_EXPR:
2307 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2308 invert_truthvalue (TREE_OPERAND (arg, 1)),
2309 invert_truthvalue (TREE_OPERAND (arg, 2)));
2311 case COMPOUND_EXPR:
2312 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2313 invert_truthvalue (TREE_OPERAND (arg, 1)));
2315 case WITH_RECORD_EXPR:
2316 return build (WITH_RECORD_EXPR, type,
2317 invert_truthvalue (TREE_OPERAND (arg, 0)),
2318 TREE_OPERAND (arg, 1));
2320 case NON_LVALUE_EXPR:
2321 return invert_truthvalue (TREE_OPERAND (arg, 0));
2323 case NOP_EXPR:
2324 case CONVERT_EXPR:
2325 case FLOAT_EXPR:
2326 return build1 (TREE_CODE (arg), type,
2327 invert_truthvalue (TREE_OPERAND (arg, 0)));
2329 case BIT_AND_EXPR:
/* !(X & 1) is (X & 1) == 0; only valid when the mask is exactly 1.  */
2330 if (!integer_onep (TREE_OPERAND (arg, 1)))
2331 break;
2332 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2334 case SAVE_EXPR:
2335 return build1 (TRUTH_NOT_EXPR, type, arg);
2337 case CLEANUP_POINT_EXPR:
2338 return build1 (CLEANUP_POINT_EXPR, type,
2339 invert_truthvalue (TREE_OPERAND (arg, 0)));
2341 default:
2342 break;
/* Unhandled codes: ARG must already be a 0/1 boolean, otherwise the
   caller violated this function's precondition.  */
2344 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2345 abort ();
2346 return build1 (TRUTH_NOT_EXPR, type, arg);
2349 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2350 operands are another bit-wise operation with a common input. If so,
2351 distribute the bit operations to save an operation and possibly two if
2352 constants are involved. For example, convert
2353 (A | B) & (A | C) into A | (B & C)
2354 Further simplification will occur if B and C are constants.
2356 If this optimization cannot be done, 0 will be returned. */
2358 static tree
2359 distribute_bit_expr (code, type, arg0, arg1)
2360 enum tree_code code;
2361 tree type;
2362 tree arg0, arg1;
2364 tree common;
2365 tree left, right;
/* Both operands must be the same kind of AND/IOR node, and different
   from CODE itself (otherwise distributing gains nothing).  */
2367 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2368 || TREE_CODE (arg0) == code
2369 || (TREE_CODE (arg0) != BIT_AND_EXPR
2370 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2371 return 0;
/* Try the four possible positions of the shared operand.  */
2373 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2375 common = TREE_OPERAND (arg0, 0);
2376 left = TREE_OPERAND (arg0, 1);
2377 right = TREE_OPERAND (arg1, 1);
2379 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2381 common = TREE_OPERAND (arg0, 0);
2382 left = TREE_OPERAND (arg0, 1);
2383 right = TREE_OPERAND (arg1, 0);
2385 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2387 common = TREE_OPERAND (arg0, 1);
2388 left = TREE_OPERAND (arg0, 0);
2389 right = TREE_OPERAND (arg1, 1);
2391 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2393 common = TREE_OPERAND (arg0, 1);
2394 left = TREE_OPERAND (arg0, 0);
2395 right = TREE_OPERAND (arg1, 0);
2397 else
2398 return 0;
/* Build COMMON op' (LEFT code RIGHT), where op' is ARG0's operator.  */
2400 return fold (build (TREE_CODE (arg0), type, common,
2401 fold (build (code, type, left, right))));
2404 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2405 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2407 static tree
2408 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2409 tree inner;
2410 tree type;
2411 int bitsize, bitpos;
2412 int unsignedp;
2414 tree result = build (BIT_FIELD_REF, type, inner,
2415 size_int (bitsize), bitsize_int (bitpos));
/* TREE_UNSIGNED on a BIT_FIELD_REF selects zero- vs sign-extension.  */
2417 TREE_UNSIGNED (result) = unsignedp;
2419 return result;
2422 /* Optimize a bit-field compare.
2424 There are two cases: First is a compare against a constant and the
2425 second is a comparison of two items where the fields are at the same
2426 bit position relative to the start of a chunk (byte, halfword, word)
2427 large enough to contain it. In these cases we can avoid the shift
2428 implicit in bitfield extractions.
2430 For constants, we emit a compare of the shifted constant with the
2431 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2432 compared. For two fields at the same position, we do the ANDs with the
2433 similar mask and compare the result of the ANDs.
2435 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2436 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2437 are the left and right operands of the comparison, respectively.
2439 If the optimization described above can be done, we return the resulting
2440 tree. Otherwise we return zero. */
2442 static tree
2443 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2444 enum tree_code code;
2445 tree compare_type;
2446 tree lhs, rhs;
2448 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2449 tree type = TREE_TYPE (lhs);
2450 tree signed_type, unsigned_type;
2451 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2452 enum machine_mode lmode, rmode, nmode;
2453 int lunsignedp, runsignedp;
2454 int lvolatilep = 0, rvolatilep = 0;
2455 tree linner, rinner = NULL_TREE;
2456 tree mask;
2457 tree offset;
2459 /* Get all the information about the extractions being done. If the bit size
2460 is the same as the size of the underlying object, we aren't doing an
2461 extraction at all and so can do nothing. We also don't want to
2462 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2463 then will no longer be able to replace it. */
2464 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2465 &lunsignedp, &lvolatilep);
2466 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2467 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2468 return 0;
2470 if (!const_p)
2472 /* If this is not a constant, we can only do something if bit positions,
2473 sizes, and signedness are the same. */
2474 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2475 &runsignedp, &rvolatilep);
2477 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2478 || lunsignedp != runsignedp || offset != 0
2479 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2480 return 0;
2483 /* See if we can find a mode to refer to this field. We should be able to,
2484 but fail if we can't. */
2485 nmode = get_best_mode (lbitsize, lbitpos,
2486 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2487 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2488 TYPE_ALIGN (TREE_TYPE (rinner))),
2489 word_mode, lvolatilep || rvolatilep);
2490 if (nmode == VOIDmode)
2491 return 0;
2493 /* Set signed and unsigned types of the precision of this mode for the
2494 shifts below. */
2495 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2496 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2498 /* Compute the bit position and size for the new reference and our offset
2499 within it. If the new reference is the same size as the original, we
2500 won't optimize anything, so return zero. */
2501 nbitsize = GET_MODE_BITSIZE (nmode);
2502 nbitpos = lbitpos & ~ (nbitsize - 1);
2503 lbitpos -= nbitpos;
2504 if (nbitsize == lbitsize)
2505 return 0;
/* Bit numbering within the chunk is endian-dependent.  */
2507 if (BYTES_BIG_ENDIAN)
2508 lbitpos = nbitsize - lbitsize - lbitpos;
2510 /* Make the mask to be used against the extracted field.  The left shift
2511 then right shift leaves LBITSIZE one-bits positioned at LBITPOS.  */
2511 mask = build_int_2 (~0, ~0);
2512 TREE_TYPE (mask) = unsigned_type;
2513 force_fit_type (mask, 0);
2514 mask = convert (unsigned_type, mask);
2515 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2516 mask = const_binop (RSHIFT_EXPR, mask,
2517 size_int (nbitsize - lbitsize - lbitpos), 0);
2519 if (! const_p)
2520 /* If not comparing with constant, just rework the comparison
2521 and return. */
2522 return build (code, compare_type,
2523 build (BIT_AND_EXPR, unsigned_type,
2524 make_bit_field_ref (linner, unsigned_type,
2525 nbitsize, nbitpos, 1),
2526 mask),
2527 build (BIT_AND_EXPR, unsigned_type,
2528 make_bit_field_ref (rinner, unsigned_type,
2529 nbitsize, nbitpos, 1),
2530 mask));
2532 /* Otherwise, we are handling the constant case. See if the constant is too
2533 big for the field. Warn and return a tree for 0 (false) if so. We do
2534 this not only for its own sake, but to avoid having to test for this
2535 error case below. If we didn't, we might generate wrong code.
2537 For unsigned fields, the constant shifted right by the field length should
2538 be all zero. For signed fields, the high-order bits should agree with
2539 the sign bit. */
2541 if (lunsignedp)
2543 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2544 convert (unsigned_type, rhs),
2545 size_int (lbitsize), 0)))
2547 warning ("comparison is always %d due to width of bit-field",
2548 code == NE_EXPR);
2549 return convert (compare_type,
2550 (code == NE_EXPR
2551 ? integer_one_node : integer_zero_node));
2554 else
2556 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2557 size_int (lbitsize - 1), 0);
2558 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2560 warning ("comparison is always %d due to width of bit-field",
2561 code == NE_EXPR);
2562 return convert (compare_type,
2563 (code == NE_EXPR
2564 ? integer_one_node : integer_zero_node));
2568 /* Single-bit compares should always be against zero.  Flip the
2569 comparison code so (bit == 1) becomes (bit != 0) and vice versa. */
2569 if (lbitsize == 1 && ! integer_zerop (rhs))
2571 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2572 rhs = convert (type, integer_zero_node);
2575 /* Make a new bitfield reference, shift the constant over the
2576 appropriate number of bits and mask it with the computed mask
2577 (in case this was a signed field). If we changed it, make a new one. */
2578 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2579 if (lvolatilep)
2581 TREE_SIDE_EFFECTS (lhs) = 1;
2582 TREE_THIS_VOLATILE (lhs) = 1;
2585 rhs = fold (const_binop (BIT_AND_EXPR,
2586 const_binop (LSHIFT_EXPR,
2587 convert (unsigned_type, rhs),
2588 size_int (lbitpos), 0),
2589 mask, 0));
2591 return build (code, compare_type,
2592 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2593 rhs);
2596 /* Subroutine for fold_truthop: decode a field reference.
2598 If EXP is a comparison reference, we return the innermost reference.
2600 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2601 set to the starting bit number.
2603 If the innermost field can be completely contained in a mode-sized
2604 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2606 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2607 otherwise it is not changed.
2609 *PUNSIGNEDP is set to the signedness of the field.
2611 *PMASK is set to the mask used. This is either contained in a
2612 BIT_AND_EXPR or derived from the width of the field.
2614 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2616 Return 0 if this is not a component reference or is one that we can't
2617 do anything with. */
2619 static tree
2620 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2621 pvolatilep, pmask, pand_mask)
2622 tree exp;
2623 HOST_WIDE_INT *pbitsize, *pbitpos;
2624 enum machine_mode *pmode;
2625 int *punsignedp, *pvolatilep;
2626 tree *pmask;
2627 tree *pand_mask;
2629 tree and_mask = 0;
2630 tree mask, inner, offset;
2631 tree unsigned_type;
2632 unsigned int precision;
2634 /* All the optimizations using this function assume integer fields.
2635 There are problems with FP fields since the type_for_size call
2636 below can fail for, e.g., XFmode. */
2637 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2638 return 0;
2640 STRIP_NOPS (exp);
/* Peel off an explicit AND mask, remembering it for the caller.  */
2642 if (TREE_CODE (exp) == BIT_AND_EXPR)
2644 and_mask = TREE_OPERAND (exp, 1);
2645 exp = TREE_OPERAND (exp, 0);
2646 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2647 if (TREE_CODE (and_mask) != INTEGER_CST)
2648 return 0;
2651 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2652 punsignedp, pvolatilep);
2653 if ((inner == exp && and_mask == 0)
2654 || *pbitsize < 0 || offset != 0
2655 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2656 return 0;
2658 /* Compute the mask to access the bitfield: *PBITSIZE low-order ones.  */
2659 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2660 precision = TYPE_PRECISION (unsigned_type);
2662 mask = build_int_2 (~0, ~0);
2663 TREE_TYPE (mask) = unsigned_type;
2664 force_fit_type (mask, 0);
2665 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2666 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2668 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2669 if (and_mask != 0)
2670 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2671 convert (unsigned_type, and_mask), mask));
2673 *pmask = mask;
2674 *pand_mask = and_mask;
2675 return inner;
2678 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2679 bit positions. */
2681 static int
2682 all_ones_mask_p (mask, size)
2683 tree mask;
2684 int size;
2686 tree type = TREE_TYPE (mask);
2687 unsigned int precision = TYPE_PRECISION (type);
2688 tree tmask;
/* Build an all-ones constant of MASK's (signed) type, then shift it so
   that only SIZE low-order ones remain, and compare against MASK.  */
2690 tmask = build_int_2 (~0, ~0);
2691 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2692 force_fit_type (tmask, 0);
2693 return
2694 tree_int_cst_equal (mask,
2695 const_binop (RSHIFT_EXPR,
2696 const_binop (LSHIFT_EXPR, tmask,
2697 size_int (precision - size),
2699 size_int (precision - size), 0));
2702 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2703 represents the sign bit of EXP's type. If EXP represents a sign
2704 or zero extension, also test VAL against the unextended type.
2705 The return value is the (sub)expression whose sign bit is VAL,
2706 or NULL_TREE otherwise. */
2708 static tree
2709 sign_bit_p (exp, val)
2710 tree exp;
2711 tree val;
2713 unsigned HOST_WIDE_INT lo;
2714 HOST_WIDE_INT hi;
2715 int width;
2716 tree t;
2718 /* Tree EXP must have an integral type. */
2719 t = TREE_TYPE (exp);
2720 if (! INTEGRAL_TYPE_P (t))
2721 return NULL_TREE;
2723 /* Tree VAL must be an integer constant. */
2724 if (TREE_CODE (val) != INTEGER_CST
2725 || TREE_CONSTANT_OVERFLOW (val))
2726 return NULL_TREE;
/* Compute the two-word (HI, LO) image of a value with only the sign bit
   of a WIDTH-bit type set.  */
2728 width = TYPE_PRECISION (t);
2729 if (width > HOST_BITS_PER_WIDE_INT)
2731 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2732 lo = 0;
2734 else
2736 hi = 0;
2737 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2740 if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
2741 return exp;
2743 /* Handle extension from a narrower type. */
2744 if (TREE_CODE (exp) == NOP_EXPR
2745 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2746 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2748 return NULL_TREE;
2751 /* Subroutine for fold_truthop: determine if an operand is simple enough
2752 to be evaluated unconditionally.  Constants and certain local,
2753 non-volatile declarations qualify. */
2754 static int
2755 simple_operand_p (exp)
2756 tree exp;
2758 /* Strip any conversions that don't change the machine mode. */
2759 while ((TREE_CODE (exp) == NOP_EXPR
2760 || TREE_CODE (exp) == CONVERT_EXPR)
2761 && (TYPE_MODE (TREE_TYPE (exp))
2762 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2763 exp = TREE_OPERAND (exp, 0);
2765 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2766 || (DECL_P (exp)
2767 && ! TREE_ADDRESSABLE (exp)
2768 && ! TREE_THIS_VOLATILE (exp)
2769 && ! DECL_NONLOCAL (exp)
2770 /* Don't regard global variables as simple. They may be
2771 allocated in ways unknown to the compiler (shared memory,
2772 #pragma weak, etc). */
2773 && ! TREE_PUBLIC (exp)
2774 && ! DECL_EXTERNAL (exp)
2775 /* Loading a static variable is unduly expensive, but global
2776 registers aren't expensive. */
2777 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2780 /* The following functions are subroutines to fold_range_test and allow it to
2781 try to change a logical combination of comparisons into a range test.
2783 For example, both
2784 X == 2 || X == 3 || X == 4 || X == 5
2786 X >= 2 && X <= 5
2787 are converted to
2788 (unsigned) (X - 2) <= 3
2790 We describe each set of comparisons as being either inside or outside
2791 a range, using a variable named like IN_P, and then describe the
2792 range with a lower and upper bound. If one of the bounds is omitted,
2793 it represents either the highest or lowest value of the type.
2795 In the comments below, we represent a range by two numbers in brackets
2796 preceded by a "+" to designate being inside that range, or a "-" to
2797 designate being outside that range, so the condition can be inverted by
2798 flipping the prefix. An omitted bound is represented by a "-". For
2799 example, "- [-, 10]" means being outside the range starting at the lowest
2800 possible value and ending at 10, in other words, being greater than 10.
2801 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2802 always false.
2804 We set up things so that the missing bounds are handled in a consistent
2805 manner so neither a missing bound nor "true" and "false" need to be
2806 handled using a special case. */
2808 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2809 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2810 and UPPER1_P are nonzero if the respective argument is an upper bound
2811 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2812 must be specified for a comparison. ARG1 will be converted to ARG0's
2813 type if both are specified. */
2815 static tree
2816 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2817 enum tree_code code;
2818 tree type;
2819 tree arg0, arg1;
2820 int upper0_p, upper1_p;
2822 tree tem;
2823 int result;
2824 int sgn0, sgn1;
2826 /* If neither arg represents infinity, do the normal operation.
2827 Else, if not a comparison, return infinity. Else handle the special
2828 comparison rules. Note that most of the cases below won't occur, but
2829 are handled for consistency. */
2831 if (arg0 != 0 && arg1 != 0)
2833 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2834 arg0, convert (TREE_TYPE (arg0), arg1)));
2835 STRIP_NOPS (tem);
/* Non-constant results mean we could not evaluate; report infinity.  */
2836 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2839 if (TREE_CODE_CLASS (code) != '<')
2840 return 0;
2842 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2843 for neither. In real maths, we cannot assume open ended ranges are
2844 the same. But, this is computer arithmetic, where numbers are finite.
2845 We can therefore make the transformation of any unbounded range with
2846 the value Z, Z being greater than any representable number. This permits
2847 us to treat unbounded ranges as equal. */
2848 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2849 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* With the bounds collapsed to signs, the comparison is just an integer
   comparison of SGN0 against SGN1.  */
2850 switch (code)
2852 case EQ_EXPR:
2853 result = sgn0 == sgn1;
2854 break;
2855 case NE_EXPR:
2856 result = sgn0 != sgn1;
2857 break;
2858 case LT_EXPR:
2859 result = sgn0 < sgn1;
2860 break;
2861 case LE_EXPR:
2862 result = sgn0 <= sgn1;
2863 break;
2864 case GT_EXPR:
2865 result = sgn0 > sgn1;
2866 break;
2867 case GE_EXPR:
2868 result = sgn0 >= sgn1;
2869 break;
2870 default:
2871 abort ();
2874 return convert (type, result ? integer_one_node : integer_zero_node);
2877 /* Given EXP, a logical expression, set the range it is testing into
2878 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2879 actually being tested. *PLOW and *PHIGH will be made of the same type
2880 as the returned expression. If EXP is not a comparison, we will most
2881 likely not be returning a useful value and range. */
2883 static tree
2884 make_range (exp, pin_p, plow, phigh)
2885 tree exp;
2886 int *pin_p;
2887 tree *plow, *phigh;
2889 enum tree_code code;
2890 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2891 tree orig_type = NULL_TREE;
2892 int in_p, n_in_p;
2893 tree low, high, n_low, n_high;
2895 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2896 and see if we can refine the range. Some of the cases below may not
2897 happen, but it doesn't seem worth worrying about this. We "continue"
2898 the outer loop when we've changed something; otherwise we "break"
2899 the switch, which will "break" the while. */
2901 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2903 while (1)
2905 code = TREE_CODE (exp);
2907 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2909 arg0 = TREE_OPERAND (exp, 0);
2910 if (TREE_CODE_CLASS (code) == '<'
2911 || TREE_CODE_CLASS (code) == '1'
2912 || TREE_CODE_CLASS (code) == '2')
2913 type = TREE_TYPE (arg0);
2914 if (TREE_CODE_CLASS (code) == '2'
2915 || TREE_CODE_CLASS (code) == '<'
2916 || (TREE_CODE_CLASS (code) == 'e'
2917 && TREE_CODE_LENGTH (code) > 1))
2918 arg1 = TREE_OPERAND (exp, 1);
2921 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2922 lose a cast by accident. */
2923 if (type != NULL_TREE && orig_type == NULL_TREE)
2924 orig_type = type;
2926 switch (code)
2928 case TRUTH_NOT_EXPR:
2929 in_p = ! in_p, exp = arg0;
2930 continue;
2932 case EQ_EXPR: case NE_EXPR:
2933 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2934 /* We can only do something if the range is testing for zero
2935 and if the second operand is an integer constant. Note that
2936 saying something is "in" the range we make is done by
2937 complementing IN_P since it will set in the initial case of
2938 being not equal to zero; "out" is leaving it alone. */
2939 if (low == 0 || high == 0
2940 || ! integer_zerop (low) || ! integer_zerop (high)
2941 || TREE_CODE (arg1) != INTEGER_CST)
2942 break;
2944 switch (code)
2946 case NE_EXPR: /* - [c, c] */
2947 low = high = arg1;
2948 break;
2949 case EQ_EXPR: /* + [c, c] */
2950 in_p = ! in_p, low = high = arg1;
2951 break;
2952 case GT_EXPR: /* - [-, c] */
2953 low = 0, high = arg1;
2954 break;
2955 case GE_EXPR: /* + [c, -] */
2956 in_p = ! in_p, low = arg1, high = 0;
2957 break;
2958 case LT_EXPR: /* - [c, -] */
2959 low = arg1, high = 0;
2960 break;
2961 case LE_EXPR: /* + [-, c] */
2962 in_p = ! in_p, low = 0, high = arg1;
2963 break;
2964 default:
2965 abort ();
2968 exp = arg0;
2970 /* If this is an unsigned comparison, we also know that EXP is
2971 greater than or equal to zero. We base the range tests we make
2972 on that fact, so we record it here so we can parse existing
2973 range tests. */
2974 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2976 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2977 1, convert (type, integer_zero_node),
2978 NULL_TREE))
2979 break;
2981 in_p = n_in_p, low = n_low, high = n_high;
2983 /* If the high bound is missing, but we
2984 have a low bound, reverse the range so
2985 it goes from zero to the low bound minus 1. */
2986 if (high == 0 && low)
2988 in_p = ! in_p;
2989 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2990 integer_one_node, 0);
2991 low = convert (type, integer_zero_node);
2994 continue;
2996 case NEGATE_EXPR:
2997 /* (-x) IN [a,b] -> x in [-b, -a] */
2998 n_low = range_binop (MINUS_EXPR, type,
2999 convert (type, integer_zero_node), 0, high, 1);
3000 n_high = range_binop (MINUS_EXPR, type,
3001 convert (type, integer_zero_node), 0, low, 0);
3002 low = n_low, high = n_high;
3003 exp = arg0;
3004 continue;
3006 case BIT_NOT_EXPR:
3007 /* ~ X -> -X - 1; reuse the NEGATE/MINUS handling above. */
3008 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3009 convert (type, integer_one_node));
3010 continue;
3012 case PLUS_EXPR: case MINUS_EXPR:
3013 if (TREE_CODE (arg1) != INTEGER_CST)
3014 break;
3016 /* If EXP is signed, any overflow in the computation is undefined,
3017 so we don't worry about it so long as our computations on
3018 the bounds don't overflow. For unsigned, overflow is defined
3019 and this is exactly the right thing. */
3020 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3021 type, low, 0, arg1, 0);
3022 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3023 type, high, 1, arg1, 0);
3024 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3025 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3026 break;
3028 /* Check for an unsigned range which has wrapped around the maximum
3029 value thus making n_high < n_low, and normalize it. */
3030 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3032 low = range_binop (PLUS_EXPR, type, n_high, 0,
3033 integer_one_node, 0);
3034 high = range_binop (MINUS_EXPR, type, n_low, 0,
3035 integer_one_node, 0);
3037 /* If the range is of the form +/- [ x+1, x ], we won't
3038 be able to normalize it. But then, it represents the
3039 whole range or the empty set, so make it
3040 +/- [ -, - ]. */
3041 if (tree_int_cst_equal (n_low, low)
3042 && tree_int_cst_equal (n_high, high))
3043 low = high = 0;
3044 else
3045 in_p = ! in_p;
3047 else
3048 low = n_low, high = n_high;
3050 exp = arg0;
3051 continue;
3053 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* Refuse to look through a widening cast relative to the type the
   caller will see, and through casts the bounds do not fit.  */
3054 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3055 break;
3057 if (! INTEGRAL_TYPE_P (type)
3058 || (low != 0 && ! int_fits_type_p (low, type))
3059 || (high != 0 && ! int_fits_type_p (high, type)))
3060 break;
3062 n_low = low, n_high = high;
3064 if (n_low != 0)
3065 n_low = convert (type, n_low);
3067 if (n_high != 0)
3068 n_high = convert (type, n_high);
3070 /* If we're converting from an unsigned to a signed type,
3071 we will be doing the comparison as unsigned. The tests above
3072 have already verified that LOW and HIGH are both positive.
3074 So we have to make sure that the original unsigned value will
3075 be interpreted as positive. */
3076 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3078 tree equiv_type = (*lang_hooks.types.type_for_mode)
3079 (TYPE_MODE (type), 1);
3080 tree high_positive;
3082 /* A range without an upper bound is, naturally, unbounded.
3083 Since convert would have cropped a very large value, use
3084 the max value for the destination type. */
3085 high_positive
3086 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3087 : TYPE_MAX_VALUE (type);
3089 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3090 high_positive = fold (build (RSHIFT_EXPR, type,
3091 convert (type, high_positive),
3092 convert (type, integer_one_node)));
3094 /* If the low bound is specified, "and" the range with the
3095 range for which the original unsigned value will be
3096 positive. */
3097 if (low != 0)
3099 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3100 1, n_low, n_high,
3101 1, convert (type, integer_zero_node),
3102 high_positive))
3103 break;
3105 in_p = (n_in_p == in_p);
3107 else
3109 /* Otherwise, "or" the range with the range of the input
3110 that will be interpreted as negative. */
3111 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3112 0, n_low, n_high,
3113 1, convert (type, integer_zero_node),
3114 high_positive))
3115 break;
3117 in_p = (in_p != n_in_p);
3121 exp = arg0;
3122 low = n_low, high = n_high;
3123 continue;
3125 default:
3126 break;
3129 break;
3132 /* If EXP is a constant, we can evaluate whether this is true or false. */
3133 if (TREE_CODE (exp) == INTEGER_CST)
3135 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3136 exp, 0, low, 0))
3137 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3138 exp, 1, high, 1)));
3139 low = high = 0;
3140 exp = 0;
3143 *pin_p = in_p, *plow = low, *phigh = high;
3144 return exp;
3147 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3148 type, TYPE, return an expression to test if EXP is in (or out of, depending
3149 on IN_P) the range.  Returns 0 if the test cannot be expressed. */
3151 static tree
3152 build_range_check (type, exp, in_p, low, high)
3153 tree type;
3154 tree exp;
3155 int in_p;
3156 tree low, high;
3158 tree etype = TREE_TYPE (exp);
3159 tree value;
/* An "out of range" test is the negation of the "in range" test.  */
3161 if (! in_p
3162 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3163 return invert_truthvalue (value);
/* No bounds at all: the test is always true.  */
3165 if (low == 0 && high == 0)
3166 return convert (type, integer_one_node);
3168 if (low == 0)
3169 return fold (build (LE_EXPR, type, exp, high));
3171 if (high == 0)
3172 return fold (build (GE_EXPR, type, exp, low));
3174 if (operand_equal_p (low, high, 0))
3175 return fold (build (EQ_EXPR, type, exp, low));
/* [0, HIGH] becomes a single unsigned comparison.  */
3177 if (integer_zerop (low))
3179 if (! TREE_UNSIGNED (etype))
3181 etype = (*lang_hooks.types.unsigned_type) (etype);
3182 high = convert (etype, high);
3183 exp = convert (etype, exp);
3185 return build_range_check (type, exp, 1, 0, high);
3188 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3189 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3191 unsigned HOST_WIDE_INT lo;
3192 HOST_WIDE_INT hi;
3193 int prec;
/* Form the two-word image of the signed maximum of ETYPE's precision. */
3195 prec = TYPE_PRECISION (etype);
3196 if (prec <= HOST_BITS_PER_WIDE_INT)
3198 hi = 0;
3199 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3201 else
3203 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3204 lo = (unsigned HOST_WIDE_INT) -1;
3207 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3209 if (TREE_UNSIGNED (etype))
3211 etype = (*lang_hooks.types.signed_type) (etype);
3212 exp = convert (etype, exp);
3214 return fold (build (GT_EXPR, type, exp,
3215 convert (etype, integer_zero_node)));
/* General case: (EXP - LOW) in [0, HIGH - LOW], provided the
   subtraction of the bounds does not overflow. */
3219 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3220 && ! TREE_OVERFLOW (value))
3221 return build_range_check (type,
3222 fold (build (MINUS_EXPR, etype, exp, low)),
3223 1, convert (etype, integer_zero_node), value);
3225 return 0;
3228 /* Given two ranges, see if we can merge them into one. Return 1 if we
3229 can, 0 if we can't. Set the output range into the specified parameters. */
3231 static int
3232 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3233 int *pin_p;
3234 tree *plow, *phigh;
3235 int in0_p, in1_p;
3236 tree low0, high0, low1, high1;
/* Merge ranges (IN0_P, [LOW0, HIGH0]) and (IN1_P, [LOW1, HIGH1]) into a
   single range, storing the result through *PIN_P, *PLOW, *PHIGH.
   Returns 1 on success, 0 if the intersection/union cannot be expressed
   as one range.  A null bound stands for an unbounded side; the 0/1
   flags passed to range_binop select which infinity a null argument
   represents (presumably lower vs. upper — see range_binop).  */
3238 int no_overlap;
3239 int subset;
3240 int temp;
3241 tree tem;
3242 int in_p;
3243 tree low, high;
/* Whether the two ranges share the same lower / upper bound.  */
3244 int lowequal = ((low0 == 0 && low1 == 0)
3245 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3246 low0, 0, low1, 0)));
3247 int highequal = ((high0 == 0 && high1 == 0)
3248 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3249 high0, 1, high1, 1)));
3251 /* Make range 0 be the range that starts first, or ends last if they
3252 start at the same value. Swap them if it isn't. */
3253 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3254 low0, 0, low1, 0))
3255 || (lowequal
3256 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3257 high1, 1, high0, 1))))
3259 temp = in0_p, in0_p = in1_p, in1_p = temp;
3260 tem = low0, low0 = low1, low1 = tem;
3261 tem = high0, high0 = high1, high1 = tem;
3264 /* Now flag two cases, whether the ranges are disjoint or whether the
3265 second range is totally subsumed in the first. Note that the tests
3266 below are simplified by the ones above. */
3267 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3268 high0, 1, low1, 0));
3269 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3270 high1, 1, high0, 1));
3272 /* We now have four cases, depending on whether we are including or
3273 excluding the two ranges. */
3274 if (in0_p && in1_p)
3276 /* If they don't overlap, the result is false. If the second range
3277 is a subset it is the result. Otherwise, the range is from the start
3278 of the second to the end of the first. */
3279 if (no_overlap)
3280 in_p = 0, low = high = 0;
3281 else if (subset)
3282 in_p = 1, low = low1, high = high1;
3283 else
3284 in_p = 1, low = low1, high = high0;
3287 else if (in0_p && ! in1_p)
3289 /* If they don't overlap, the result is the first range. If they are
3290 equal, the result is false. If the second range is a subset of the
3291 first, and the ranges begin at the same place, we go from just after
3292 the end of the first range to the end of the second. If the second
3293 range is not a subset of the first, or if it is a subset and both
3294 ranges end at the same place, the range starts at the start of the
3295 first range and ends just before the second range.
3296 Otherwise, we can't describe this as a single range. */
3297 if (no_overlap)
3298 in_p = 1, low = low0, high = high0;
3299 else if (lowequal && highequal)
3300 in_p = 0, low = high = 0;
3301 else if (subset && lowequal)
3303 in_p = 1, high = high0;
3304 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3305 integer_one_node, 0);
3307 else if (! subset || highequal)
3309 in_p = 1, low = low0;
3310 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3311 integer_one_node, 0);
3313 else
3314 return 0;
3317 else if (! in0_p && in1_p)
3319 /* If they don't overlap, the result is the second range. If the second
3320 is a subset of the first, the result is false. Otherwise,
3321 the range starts just after the first range and ends at the
3322 end of the second. */
3323 if (no_overlap)
3324 in_p = 1, low = low1, high = high1;
3325 else if (subset || highequal)
3326 in_p = 0, low = high = 0;
3327 else
3329 in_p = 1, high = high1;
3330 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3331 integer_one_node, 0);
3335 else
3337 /* The case where we are excluding both ranges. Here the complex case
3338 is if they don't overlap. In that case, the only time we have a
3339 range is if they are adjacent. If the second is a subset of the
3340 first, the result is the first. Otherwise, the range to exclude
3341 starts at the beginning of the first range and ends at the end of the
3342 second. */
3343 if (no_overlap)
3345 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3346 range_binop (PLUS_EXPR, NULL_TREE,
3347 high0, 1,
3348 integer_one_node, 1),
3349 1, low1, 0)))
3350 in_p = 0, low = low0, high = high1;
3351 else
3352 return 0;
3354 else if (subset)
3355 in_p = 0, low = low0, high = high0;
3356 else
3357 in_p = 0, low = low0, high = high1;
/* Report the merged range to the caller.  */
3360 *pin_p = in_p, *plow = low, *phigh = high;
3361 return 1;
3364 /* EXP is some logical combination of boolean tests. See if we can
3365 merge it into some range test. Return the new tree if so. */
3367 static tree
3368 fold_range_test (exp)
3369 tree exp;
/* EXP is a TRUTH_AND{,IF}_EXPR or TRUTH_OR{,IF}_EXPR of boolean tests.
   Try to turn both operands into ranges (via make_range) and merge them
   into one range check.  Returns the new tree, or 0 if nothing can be
   done.  */
3371 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3372 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3373 int in0_p, in1_p, in_p;
3374 tree low0, low1, low, high0, high1, high;
3375 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3376 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3377 tree tem;
3379 /* If this is an OR operation, invert both sides; we will invert
3380 again at the end. */
3381 if (or_op)
3382 in0_p = ! in0_p, in1_p = ! in1_p;
3384 /* If both expressions are the same, if we can merge the ranges, and we
3385 can build the range test, return it or it inverted. If one of the
3386 ranges is always true or always false, consider it to be the same
3387 expression as the other. */
3388 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3389 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3390 in1_p, low1, high1)
3391 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3392 lhs != 0 ? lhs
3393 : rhs != 0 ? rhs : integer_zero_node,
3394 in_p, low, high))))
3395 return or_op ? invert_truthvalue (tem) : tem;
3397 /* On machines where the branch cost is expensive, if this is a
3398 short-circuited branch and the underlying object on both sides
3399 is the same, make a non-short-circuit operation. */
3400 else if (BRANCH_COST >= 2
3401 && lhs != 0 && rhs != 0
3402 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3403 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3404 && operand_equal_p (lhs, rhs, 0))
3406 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3407 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3408 which cases we can't do this. */
3409 if (simple_operand_p (lhs))
3410 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3411 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3412 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3413 TREE_OPERAND (exp, 1));
3415 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3416 && ! contains_placeholder_p (lhs))
/* Wrap the shared subexpression in a SAVE_EXPR so it is evaluated
   only once, then rebuild both range checks against that copy.  */
3418 tree common = save_expr (lhs);
3420 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3421 or_op ? ! in0_p : in0_p,
3422 low0, high0))
3423 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3424 or_op ? ! in1_p : in1_p,
3425 low1, high1))))
3426 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3427 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3428 TREE_TYPE (exp), lhs, rhs);
/* No simplification found.  */
3432 return 0;
3435 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3436 bit value. Arrange things so the extra bits will be set to zero if and
3437 only if C is signed-extended to its full width. If MASK is nonzero,
3438 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3440 static tree
3441 unextend (c, p, unsignedp, mask)
3442 tree c;
3443 int p;
3444 int unsignedp;
3445 tree mask;
/* C is an INTEGER_CST holding a P-bit field value; UNSIGNEDP says whether
   the field is unsigned; MASK, if nonzero, is ANDed with the extra bits.
   Returns C adjusted so the bits above bit P-1 are the sign-extension of
   the P-bit value (see the block comment above).  */
3447 tree type = TREE_TYPE (c);
3448 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3449 tree temp;
/* Nothing to do when the field fills the mode or is unsigned.  */
3451 if (p == modesize || unsignedp)
3452 return c;
3454 /* We work by getting just the sign bit into the low-order bit, then
3455 into the high-order bit, then sign-extend. We then XOR that value
3456 with C. */
3457 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3458 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3460 /* We must use a signed type in order to get an arithmetic right shift.
3461 However, we must also avoid introducing accidental overflows, so that
3462 a subsequent call to integer_zerop will work. Hence we must
3463 do the type conversion here. At this point, the constant is either
3464 zero or one, and the conversion to a signed type can never overflow.
3465 We could get an overflow if this conversion is done anywhere else. */
3466 if (TREE_UNSIGNED (type))
3467 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3469 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3470 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3471 if (mask != 0)
3472 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3473 /* If necessary, convert the type back to match the type of C. */
3474 if (TREE_UNSIGNED (type))
3475 temp = convert (type, temp);
/* XOR flips exactly the upper bits selected by TEMP.  */
3477 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3480 /* Find ways of folding logical expressions of LHS and RHS:
3481 Try to merge two comparisons to the same innermost item.
3482 Look for range tests like "ch >= '0' && ch <= '9'".
3483 Look for combinations of simple terms on machines with expensive branches
3484 and evaluate the RHS unconditionally.
3486 For example, if we have p->a == 2 && p->b == 4 and we can make an
3487 object large enough to span both A and B, we can do this with a comparison
3488 against the object ANDed with the a mask.
3490 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3491 operations to do this with one comparison.
3493 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3494 function and the one above.
3496 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3497 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3499 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3500 two operands.
3502 We return the simplified tree or 0 if no optimization is possible. */
3504 static tree
3505 fold_truthop (code, truth_type, lhs, rhs)
3506 enum tree_code code;
3507 tree truth_type, lhs, rhs;
/* See the block comment above: CODE is the logical operation,
   TRUTH_TYPE its type, LHS/RHS its two comparison operands.  Returns
   the simplified tree or 0 if no optimization is possible.  */
3509 /* If this is the "or" of two comparisons, we can do something if
3510 the comparisons are NE_EXPR. If this is the "and", we can do something
3511 if the comparisons are EQ_EXPR. I.e.,
3512 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3514 WANTED_CODE is this operation code. For single bit fields, we can
3515 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3516 comparison for one-bit fields. */
3518 enum tree_code wanted_code;
3519 enum tree_code lcode, rcode;
3520 tree ll_arg, lr_arg, rl_arg, rr_arg;
3521 tree ll_inner, lr_inner, rl_inner, rr_inner;
3522 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3523 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3524 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3525 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3526 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3527 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3528 enum machine_mode lnmode, rnmode;
3529 tree ll_mask, lr_mask, rl_mask, rr_mask;
3530 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3531 tree l_const, r_const;
3532 tree lntype, rntype, result;
3533 int first_bit, end_bit;
3534 int volatilep;
3536 /* Start by getting the comparison codes. Fail if anything is volatile.
3537 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3538 it were surrounded with a NE_EXPR. */
3540 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3541 return 0;
3543 lcode = TREE_CODE (lhs);
3544 rcode = TREE_CODE (rhs);
3546 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3547 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3549 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3550 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
/* Both operands must be comparisons ('<' class) to go any further.  */
3552 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3553 return 0;
/* Canonicalize the short-circuit codes to their plain forms.  */
3555 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3556 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3558 ll_arg = TREE_OPERAND (lhs, 0);
3559 lr_arg = TREE_OPERAND (lhs, 1);
3560 rl_arg = TREE_OPERAND (rhs, 0);
3561 rr_arg = TREE_OPERAND (rhs, 1);
3563 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3564 if (simple_operand_p (ll_arg)
3565 && simple_operand_p (lr_arg)
3566 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3568 int compcode;
/* Same operands in the same order: combine the comparison codes
   bitwise (AND for &&, OR for ||) in compcode space.  */
3570 if (operand_equal_p (ll_arg, rl_arg, 0)
3571 && operand_equal_p (lr_arg, rr_arg, 0))
3573 int lcompcode, rcompcode;
3575 lcompcode = comparison_to_compcode (lcode);
3576 rcompcode = comparison_to_compcode (rcode);
3577 compcode = (code == TRUTH_AND_EXPR)
3578 ? lcompcode & rcompcode
3579 : lcompcode | rcompcode;
/* Same operands but swapped on the right: flip RCODE first.  */
3581 else if (operand_equal_p (ll_arg, rr_arg, 0)
3582 && operand_equal_p (lr_arg, rl_arg, 0))
3584 int lcompcode, rcompcode;
3586 rcode = swap_tree_comparison (rcode);
3587 lcompcode = comparison_to_compcode (lcode);
3588 rcompcode = comparison_to_compcode (rcode);
3589 compcode = (code == TRUTH_AND_EXPR)
3590 ? lcompcode & rcompcode
3591 : lcompcode | rcompcode;
3593 else
3594 compcode = -1;
3596 if (compcode == COMPCODE_TRUE)
3597 return convert (truth_type, integer_one_node);
3598 else if (compcode == COMPCODE_FALSE)
3599 return convert (truth_type, integer_zero_node);
3600 else if (compcode != -1)
3601 return build (compcode_to_comparison (compcode),
3602 truth_type, ll_arg, lr_arg);
3605 /* If the RHS can be evaluated unconditionally and its operands are
3606 simple, it wins to evaluate the RHS unconditionally on machines
3607 with expensive branches. In this case, this isn't a comparison
3608 that can be merged. Avoid doing this if the RHS is a floating-point
3609 comparison since those can trap. */
3611 if (BRANCH_COST >= 2
3612 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3613 && simple_operand_p (rl_arg)
3614 && simple_operand_p (rr_arg))
3616 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3617 if (code == TRUTH_OR_EXPR
3618 && lcode == NE_EXPR && integer_zerop (lr_arg)
3619 && rcode == NE_EXPR && integer_zerop (rr_arg)
3620 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3621 return build (NE_EXPR, truth_type,
3622 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3623 ll_arg, rl_arg),
3624 integer_zero_node);
3626 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3627 if (code == TRUTH_AND_EXPR
3628 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3629 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3630 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3631 return build (EQ_EXPR, truth_type,
3632 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3633 ll_arg, rl_arg),
3634 integer_zero_node);
/* Otherwise just drop the short-circuit: both sides are cheap.  */
3636 return build (code, truth_type, lhs, rhs);
3639 /* See if the comparisons can be merged. Then get all the parameters for
3640 each side. */
3642 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3643 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3644 return 0;
/* Decode each of the four comparison operands as a (possibly masked)
   bit-field reference; VOLATILEP accumulates across all four.  */
3646 volatilep = 0;
3647 ll_inner = decode_field_reference (ll_arg,
3648 &ll_bitsize, &ll_bitpos, &ll_mode,
3649 &ll_unsignedp, &volatilep, &ll_mask,
3650 &ll_and_mask);
3651 lr_inner = decode_field_reference (lr_arg,
3652 &lr_bitsize, &lr_bitpos, &lr_mode,
3653 &lr_unsignedp, &volatilep, &lr_mask,
3654 &lr_and_mask);
3655 rl_inner = decode_field_reference (rl_arg,
3656 &rl_bitsize, &rl_bitpos, &rl_mode,
3657 &rl_unsignedp, &volatilep, &rl_mask,
3658 &rl_and_mask);
3659 rr_inner = decode_field_reference (rr_arg,
3660 &rr_bitsize, &rr_bitpos, &rr_mode,
3661 &rr_unsignedp, &volatilep, &rr_mask,
3662 &rr_and_mask);
3664 /* It must be true that the inner operation on the lhs of each
3665 comparison must be the same if we are to be able to do anything.
3666 Then see if we have constants. If not, the same must be true for
3667 the rhs's. */
3668 if (volatilep || ll_inner == 0 || rl_inner == 0
3669 || ! operand_equal_p (ll_inner, rl_inner, 0))
3670 return 0;
3672 if (TREE_CODE (lr_arg) == INTEGER_CST
3673 && TREE_CODE (rr_arg) == INTEGER_CST)
3674 l_const = lr_arg, r_const = rr_arg;
3675 else if (lr_inner == 0 || rr_inner == 0
3676 || ! operand_equal_p (lr_inner, rr_inner, 0))
3677 return 0;
3678 else
3679 l_const = r_const = 0;
3681 /* If either comparison code is not correct for our logical operation,
3682 fail. However, we can convert a one-bit comparison against zero into
3683 the opposite comparison against that bit being set in the field. */
3685 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3686 if (lcode != wanted_code)
3688 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3690 /* Make the left operand unsigned, since we are only interested
3691 in the value of one bit. Otherwise we are doing the wrong
3692 thing below. */
3693 ll_unsignedp = 1;
3694 l_const = ll_mask;
3696 else
3697 return 0;
3700 /* This is analogous to the code for l_const above. */
3701 if (rcode != wanted_code)
3703 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3705 rl_unsignedp = 1;
3706 r_const = rl_mask;
3708 else
3709 return 0;
3712 /* After this point all optimizations will generate bit-field
3713 references, which we might not want. */
3714 if (! (*lang_hooks.can_use_bit_fields_p) ())
3715 return 0;
3717 /* See if we can find a mode that contains both fields being compared on
3718 the left. If we can't, fail. Otherwise, update all constants and masks
3719 to be relative to a field of that size. */
3720 first_bit = MIN (ll_bitpos, rl_bitpos);
3721 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3722 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3723 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3724 volatilep);
3725 if (lnmode == VOIDmode)
3726 return 0;
3728 lnbitsize = GET_MODE_BITSIZE (lnmode);
3729 lnbitpos = first_bit & ~ (lnbitsize - 1);
3730 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3731 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3733 if (BYTES_BIG_ENDIAN)
3735 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3736 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift the masks into position within the combined field.  */
3739 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3740 size_int (xll_bitpos), 0);
3741 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3742 size_int (xrl_bitpos), 0);
3744 if (l_const)
3746 l_const = convert (lntype, l_const);
3747 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3748 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits outside its mask, the test is vacuous.  */
3749 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3750 fold (build1 (BIT_NOT_EXPR,
3751 lntype, ll_mask)),
3752 0)))
3754 warning ("comparison is always %d", wanted_code == NE_EXPR);
3756 return convert (truth_type,
3757 wanted_code == NE_EXPR
3758 ? integer_one_node : integer_zero_node);
3761 if (r_const)
3763 r_const = convert (lntype, r_const);
3764 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3765 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3766 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3767 fold (build1 (BIT_NOT_EXPR,
3768 lntype, rl_mask)),
3769 0)))
3771 warning ("comparison is always %d", wanted_code == NE_EXPR);
3773 return convert (truth_type,
3774 wanted_code == NE_EXPR
3775 ? integer_one_node : integer_zero_node);
3779 /* If the right sides are not constant, do the same for it. Also,
3780 disallow this optimization if a size or signedness mismatch occurs
3781 between the left and right sides. */
3782 if (l_const == 0)
3784 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3785 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3786 /* Make sure the two fields on the right
3787 correspond to the left without being swapped. */
3788 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3789 return 0;
3791 first_bit = MIN (lr_bitpos, rr_bitpos);
3792 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3793 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3794 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3795 volatilep);
3796 if (rnmode == VOIDmode)
3797 return 0;
3799 rnbitsize = GET_MODE_BITSIZE (rnmode);
3800 rnbitpos = first_bit & ~ (rnbitsize - 1);
3801 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3802 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3804 if (BYTES_BIG_ENDIAN)
3806 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3807 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3810 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3811 size_int (xlr_bitpos), 0);
3812 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3813 size_int (xrr_bitpos), 0);
3815 /* Make a mask that corresponds to both fields being compared.
3816 Do this for both items being compared. If the operands are the
3817 same size and the bits being compared are in the same position
3818 then we can do this by masking both and comparing the masked
3819 results. */
3820 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3821 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3822 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3824 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3825 ll_unsignedp || rl_unsignedp);
3826 if (! all_ones_mask_p (ll_mask, lnbitsize))
3827 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3829 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3830 lr_unsignedp || rr_unsignedp);
3831 if (! all_ones_mask_p (lr_mask, rnbitsize))
3832 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3834 return build (wanted_code, truth_type, lhs, rhs);
3837 /* There is still another way we can do something: If both pairs of
3838 fields being compared are adjacent, we may be able to make a wider
3839 field containing them both.
3841 Note that we still must mask the lhs/rhs expressions. Furthermore,
3842 the mask must be shifted to account for the shift done by
3843 make_bit_field_ref. */
3844 if ((ll_bitsize + ll_bitpos == rl_bitpos
3845 && lr_bitsize + lr_bitpos == rr_bitpos)
3846 || (ll_bitpos == rl_bitpos + rl_bitsize
3847 && lr_bitpos == rr_bitpos + rr_bitsize))
3849 tree type;
3851 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3852 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3853 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3854 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3856 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3857 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3858 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3859 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3861 /* Convert to the smaller type before masking out unwanted bits. */
3862 type = lntype;
3863 if (lntype != rntype)
3865 if (lnbitsize > rnbitsize)
3867 lhs = convert (rntype, lhs);
3868 ll_mask = convert (rntype, ll_mask);
3869 type = rntype;
3871 else if (lnbitsize < rnbitsize)
3873 rhs = convert (lntype, rhs);
3874 lr_mask = convert (lntype, lr_mask);
3875 type = lntype;
3879 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3880 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3882 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3883 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3885 return build (wanted_code, truth_type, lhs, rhs);
3888 return 0;
3891 /* Handle the case of comparisons with constants. If there is something in
3892 common between the masks, those bits of the constants must be the same.
3893 If not, the condition is always false. Test for this to avoid generating
3894 incorrect code below. */
3895 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3896 if (! integer_zerop (result)
3897 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3898 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3900 if (wanted_code == NE_EXPR)
3902 warning ("`or' of unmatched not-equal tests is always 1");
3903 return convert (truth_type, integer_one_node);
3905 else
3907 warning ("`and' of mutually exclusive equal-tests is always 0");
3908 return convert (truth_type, integer_zero_node);
3912 /* Construct the expression we will return. First get the component
3913 reference we will make. Unless the mask is all ones the width of
3914 that field, perform the mask operation. Then compare with the
3915 merged constant. */
3916 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3917 ll_unsignedp || rl_unsignedp);
3919 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3920 if (! all_ones_mask_p (ll_mask, lnbitsize))
3921 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3923 return build (wanted_code, truth_type, result,
3924 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3927 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3928 constant. */
3930 static tree
3931 optimize_minmax_comparison (t)
3932 tree t;
/* T is a comparison whose first operand is a MIN_EXPR or MAX_EXPR and
   whose second operand is an INTEGER_CST.  Returns a simplified tree,
   or T unchanged if no optimization applies.  The inline examples below
   use MIN/MAX (X, 0) for concreteness; the code handles any constant.  */
3934 tree type = TREE_TYPE (t);
3935 tree arg0 = TREE_OPERAND (t, 0);
3936 enum tree_code op_code;
3937 tree comp_const = TREE_OPERAND (t, 1);
3938 tree minmax_const;
3939 int consts_equal, consts_lt;
3940 tree inner;
3942 STRIP_SIGN_NOPS (arg0);
/* MINMAX_CONST is the constant inside the MIN/MAX; CONSTS_EQUAL and
   CONSTS_LT compare it against the comparison constant.  */
3944 op_code = TREE_CODE (arg0);
3945 minmax_const = TREE_OPERAND (arg0, 1);
3946 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3947 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3948 inner = TREE_OPERAND (arg0, 0);
3950 /* If something does not permit us to optimize, return the original tree. */
3951 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3952 || TREE_CODE (comp_const) != INTEGER_CST
3953 || TREE_CONSTANT_OVERFLOW (comp_const)
3954 || TREE_CODE (minmax_const) != INTEGER_CST
3955 || TREE_CONSTANT_OVERFLOW (minmax_const))
3956 return t;
3958 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3959 and GT_EXPR, doing the rest with recursive calls using logical
3960 simplifications. */
3961 switch (TREE_CODE (t))
3963 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Reduce to the inverse comparison, optimize that, and invert back.  */
3964 return
3965 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
3967 case GE_EXPR:
/* a >= b is (a == b) || (a > b); optimize each disjunct.  */
3968 return
3969 fold (build (TRUTH_ORIF_EXPR, type,
3970 optimize_minmax_comparison
3971 (build (EQ_EXPR, type, arg0, comp_const)),
3972 optimize_minmax_comparison
3973 (build (GT_EXPR, type, arg0, comp_const))));
3975 case EQ_EXPR:
3976 if (op_code == MAX_EXPR && consts_equal)
3977 /* MAX (X, 0) == 0 -> X <= 0 */
3978 return fold (build (LE_EXPR, type, inner, comp_const));
3980 else if (op_code == MAX_EXPR && consts_lt)
3981 /* MAX (X, 0) == 5 -> X == 5 */
3982 return fold (build (EQ_EXPR, type, inner, comp_const));
3984 else if (op_code == MAX_EXPR)
3985 /* MAX (X, 0) == -1 -> false */
3986 return omit_one_operand (type, integer_zero_node, inner);
3988 else if (consts_equal)
3989 /* MIN (X, 0) == 0 -> X >= 0 */
3990 return fold (build (GE_EXPR, type, inner, comp_const));
3992 else if (consts_lt)
3993 /* MIN (X, 0) == 5 -> false */
3994 return omit_one_operand (type, integer_zero_node, inner);
3996 else
3997 /* MIN (X, 0) == -1 -> X == -1 */
3998 return fold (build (EQ_EXPR, type, inner, comp_const));
4000 case GT_EXPR:
4001 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4002 /* MAX (X, 0) > 0 -> X > 0
4003 MAX (X, 0) > 5 -> X > 5 */
4004 return fold (build (GT_EXPR, type, inner, comp_const));
4006 else if (op_code == MAX_EXPR)
4007 /* MAX (X, 0) > -1 -> true */
4008 return omit_one_operand (type, integer_one_node, inner);
4010 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4011 /* MIN (X, 0) > 0 -> false
4012 MIN (X, 0) > 5 -> false */
4013 return omit_one_operand (type, integer_zero_node, inner);
4015 else
4016 /* MIN (X, 0) > -1 -> X > -1 */
4017 return fold (build (GT_EXPR, type, inner, comp_const));
4019 default:
/* All other comparison codes are left untouched.  */
4020 return t;
4024 /* T is an integer expression that is being multiplied, divided, or taken a
4025 modulus (CODE says which and what kind of divide or modulus) by a
4026 constant C. See if we can eliminate that operation by folding it with
4027 other operations already in T. WIDE_TYPE, if non-null, is a type that
4028 should be used for the computation if wider than our type.
4030 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4031 (X * 2) + (Y * 4). We must, however, be assured that either the original
4032 expression would not overflow or that overflow is undefined for the type
4033 in the language in question.
4035 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4036 the machine has a multiply-accumulate insn or that this is part of an
4037 addressing calculation.
4039 If we return a non-null expression, it is an equivalent form of the
4040 original computation, but need not be in the original type. */
4042 static tree
4043 extract_muldiv (t, c, code, wide_type)
4044 tree t;
4045 tree c;
4046 enum tree_code code;
4047 tree wide_type;
4049 tree type = TREE_TYPE (t);
4050 enum tree_code tcode = TREE_CODE (t);
4051 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4052 > GET_MODE_SIZE (TYPE_MODE (type)))
4053 ? wide_type : type);
4054 tree t1, t2;
4055 int same_p = tcode == code;
4056 tree op0 = NULL_TREE, op1 = NULL_TREE;
4058 /* Don't deal with constants of zero here; they confuse the code below. */
4059 if (integer_zerop (c))
4060 return NULL_TREE;
4062 if (TREE_CODE_CLASS (tcode) == '1')
4063 op0 = TREE_OPERAND (t, 0);
4065 if (TREE_CODE_CLASS (tcode) == '2')
4066 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4068 /* Note that we need not handle conditional operations here since fold
4069 already handles those cases. So just do arithmetic here. */
4070 switch (tcode)
4072 case INTEGER_CST:
4073 /* For a constant, we can always simplify if we are a multiply
4074 or (for divide and modulus) if it is a multiple of our constant. */
4075 if (code == MULT_EXPR
4076 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4077 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4078 break;
4080 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4081 /* If op0 is an expression ... */
4082 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4083 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4084 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4085 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4086 /* ... and is unsigned, and its type is smaller than ctype,
4087 then we cannot pass through as widening. */
4088 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4089 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4090 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4091 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4092 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4093 /* ... or its type is larger than ctype,
4094 then we cannot pass through this truncation. */
4095 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4096 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))))
4097 break;
4099 /* Pass the constant down and see if we can make a simplification. If
4100 we can, replace this expression with the inner simplification for
4101 possible later conversion to our or some other type. */
4102 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4103 code == MULT_EXPR ? ctype : NULL_TREE)))
4104 return t1;
4105 break;
4107 case NEGATE_EXPR: case ABS_EXPR:
4108 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4109 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4110 break;
4112 case MIN_EXPR: case MAX_EXPR:
4113 /* If widening the type changes the signedness, then we can't perform
4114 this optimization as that changes the result. */
4115 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4116 break;
4118 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4119 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4120 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4122 if (tree_int_cst_sgn (c) < 0)
4123 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4125 return fold (build (tcode, ctype, convert (ctype, t1),
4126 convert (ctype, t2)));
4128 break;
4130 case WITH_RECORD_EXPR:
4131 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4132 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4133 TREE_OPERAND (t, 1));
4134 break;
4136 case SAVE_EXPR:
4137 /* If this has not been evaluated and the operand has no side effects,
4138 we can see if we can do something inside it and make a new one.
4139 Note that this test is overly conservative since we can do this
4140 if the only reason it had side effects is that it was another
4141 similar SAVE_EXPR, but that isn't worth bothering with. */
4142 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4143 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4144 wide_type)))
4146 t1 = save_expr (t1);
4147 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
4148 SAVE_EXPR_PERSISTENT_P (t1) = 1;
4149 if (is_pending_size (t))
4150 put_pending_size (t1);
4151 return t1;
4153 break;
4155 case LSHIFT_EXPR: case RSHIFT_EXPR:
4156 /* If the second operand is constant, this is a multiplication
4157 or floor division, by a power of two, so we can treat it that
4158 way unless the multiplier or divisor overflows. */
4159 if (TREE_CODE (op1) == INTEGER_CST
4160 /* const_binop may not detect overflow correctly,
4161 so check for it explicitly here. */
4162 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4163 && TREE_INT_CST_HIGH (op1) == 0
4164 && 0 != (t1 = convert (ctype,
4165 const_binop (LSHIFT_EXPR, size_one_node,
4166 op1, 0)))
4167 && ! TREE_OVERFLOW (t1))
4168 return extract_muldiv (build (tcode == LSHIFT_EXPR
4169 ? MULT_EXPR : FLOOR_DIV_EXPR,
4170 ctype, convert (ctype, op0), t1),
4171 c, code, wide_type);
4172 break;
4174 case PLUS_EXPR: case MINUS_EXPR:
4175 /* See if we can eliminate the operation on both sides. If we can, we
4176 can return a new PLUS or MINUS. If we can't, the only remaining
4177 cases where we can do anything are if the second operand is a
4178 constant. */
4179 t1 = extract_muldiv (op0, c, code, wide_type);
4180 t2 = extract_muldiv (op1, c, code, wide_type);
4181 if (t1 != 0 && t2 != 0
4182 && (code == MULT_EXPR
4183 /* If not multiplication, we can only do this if both operands
4184 are divisible by c. */
4185 || (multiple_of_p (ctype, op0, c)
4186 && multiple_of_p (ctype, op1, c))))
4187 return fold (build (tcode, ctype, convert (ctype, t1),
4188 convert (ctype, t2)));
4190 /* If this was a subtraction, negate OP1 and set it to be an addition.
4191 This simplifies the logic below. */
4192 if (tcode == MINUS_EXPR)
4193 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4195 if (TREE_CODE (op1) != INTEGER_CST)
4196 break;
4198 /* If either OP1 or C are negative, this optimization is not safe for
4199 some of the division and remainder types while for others we need
4200 to change the code. */
4201 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4203 if (code == CEIL_DIV_EXPR)
4204 code = FLOOR_DIV_EXPR;
4205 else if (code == FLOOR_DIV_EXPR)
4206 code = CEIL_DIV_EXPR;
4207 else if (code != MULT_EXPR
4208 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4209 break;
4212 /* If it's a multiply or a division/modulus operation of a multiple
4213 of our constant, do the operation and verify it doesn't overflow. */
4214 if (code == MULT_EXPR
4215 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4217 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4218 if (op1 == 0 || TREE_OVERFLOW (op1))
4219 break;
4221 else
4222 break;
4224 /* If we have an unsigned type that is not a sizetype, we cannot widen
4225 the operation since it will change the result if the original
4226 computation overflowed. */
4227 if (TREE_UNSIGNED (ctype)
4228 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4229 && ctype != type)
4230 break;
4232 /* If we were able to eliminate our operation from the first side,
4233 apply our operation to the second side and reform the PLUS. */
4234 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4235 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4237 /* The last case is if we are a multiply. In that case, we can
4238 apply the distributive law to commute the multiply and addition
4239 if the multiplication of the constants doesn't overflow. */
4240 if (code == MULT_EXPR)
4241 return fold (build (tcode, ctype, fold (build (code, ctype,
4242 convert (ctype, op0),
4243 convert (ctype, c))),
4244 op1));
4246 break;
4248 case MULT_EXPR:
4249 /* We have a special case here if we are doing something like
4250 (C * 8) % 4 since we know that's zero. */
4251 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4252 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4253 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4254 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4255 return omit_one_operand (type, integer_zero_node, op0);
4257 /* ... fall through ... */
4259 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4260 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4261 /* If we can extract our operation from the LHS, do so and return a
4262 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4263 do something only if the second operand is a constant. */
4264 if (same_p
4265 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4266 return fold (build (tcode, ctype, convert (ctype, t1),
4267 convert (ctype, op1)));
4268 else if (tcode == MULT_EXPR && code == MULT_EXPR
4269 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4270 return fold (build (tcode, ctype, convert (ctype, op0),
4271 convert (ctype, t1)));
4272 else if (TREE_CODE (op1) != INTEGER_CST)
4273 return 0;
4275 /* If these are the same operation types, we can associate them
4276 assuming no overflow. */
4277 if (tcode == code
4278 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4279 convert (ctype, c), 0))
4280 && ! TREE_OVERFLOW (t1))
4281 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4283 /* If these operations "cancel" each other, we have the main
4284 optimizations of this pass, which occur when either constant is a
4285 multiple of the other, in which case we replace this with either an
4286 operation or CODE or TCODE.
4288 If we have an unsigned type that is not a sizetype, we cannot do
4289 this since it will change the result if the original computation
4290 overflowed. */
4291 if ((! TREE_UNSIGNED (ctype)
4292 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4293 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4294 || (tcode == MULT_EXPR
4295 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4296 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4298 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4299 return fold (build (tcode, ctype, convert (ctype, op0),
4300 convert (ctype,
4301 const_binop (TRUNC_DIV_EXPR,
4302 op1, c, 0))));
4303 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4304 return fold (build (code, ctype, convert (ctype, op0),
4305 convert (ctype,
4306 const_binop (TRUNC_DIV_EXPR,
4307 c, op1, 0))));
4309 break;
4311 default:
4312 break;
4315 return 0;
4318 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4319 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4320 that we may sometimes modify the tree. */
4322 static tree
4323 strip_compound_expr (t, s)
4324 tree t;
4325 tree s;
4327 enum tree_code code = TREE_CODE (t);
4329 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4330 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4331 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4332 return TREE_OPERAND (t, 1);
4334 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4335 don't bother handling any other types. */
4336 else if (code == COND_EXPR)
4338 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4339 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4340 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4342 else if (TREE_CODE_CLASS (code) == '1')
4343 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4344 else if (TREE_CODE_CLASS (code) == '<'
4345 || TREE_CODE_CLASS (code) == '2')
4347 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4348 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4351 return t;
4354 /* Return a node which has the indicated constant VALUE (either 0 or
4355 1), and is of the indicated TYPE. */
4357 static tree
4358 constant_boolean_node (value, type)
4359 int value;
4360 tree type;
4362 if (type == integer_type_node)
4363 return value ? integer_one_node : integer_zero_node;
4364 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4365 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4366 integer_zero_node);
4367 else
4369 tree t = build_int_2 (value, 0);
4371 TREE_TYPE (t) = type;
4372 return t;
4376 /* Utility function for the following routine, to see how complex a nesting of
4377 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4378 we don't care (to avoid spending too much time on complex expressions.). */
4380 static int
4381 count_cond (expr, lim)
4382 tree expr;
4383 int lim;
4385 int ctrue, cfalse;
4387 if (TREE_CODE (expr) != COND_EXPR)
4388 return 0;
4389 else if (lim <= 0)
4390 return 0;
4392 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4393 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4394 return MIN (lim, 1 + ctrue + cfalse);
4397 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4398 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4399 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4400 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4401 COND is the first argument to CODE; otherwise (as in the example
4402 given here), it is the second argument. TYPE is the type of the
4403 original expression. */
4405 static tree
4406 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4407 enum tree_code code;
4408 tree type;
4409 tree cond;
4410 tree arg;
4411 int cond_first_p;
4413 tree test, true_value, false_value;
4414 tree lhs = NULL_TREE;
4415 tree rhs = NULL_TREE;
4416 /* In the end, we'll produce a COND_EXPR. Both arms of the
4417 conditional expression will be binary operations. The left-hand
4418 side of the expression to be executed if the condition is true
4419 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4420 of the expression to be executed if the condition is true will be
4421 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4422 but apply to the expression to be executed if the conditional is
4423 false. */
4424 tree *true_lhs;
4425 tree *true_rhs;
4426 tree *false_lhs;
4427 tree *false_rhs;
4428 /* These are the codes to use for the left-hand side and right-hand
4429 side of the COND_EXPR. Normally, they are the same as CODE. */
4430 enum tree_code lhs_code = code;
4431 enum tree_code rhs_code = code;
4432 /* And these are the types of the expressions. */
4433 tree lhs_type = type;
4434 tree rhs_type = type;
/* SAVE becomes nonzero when ARG is wrapped in a SAVE_EXPR; the final
   result must then evaluate ARG first via a COMPOUND_EXPR.  */
4435 int save = 0;
/* Wire the four operand slots: the side that COND occupied in the
   original expression gets the (not-yet-assigned) true_value and
   false_value, while the other side gets ARG.  Taking the addresses
   here is safe because the values are only read after being filled
   in below.  */
4437 if (cond_first_p)
4439 true_rhs = false_rhs = &arg;
4440 true_lhs = &true_value;
4441 false_lhs = &false_value;
4443 else
4445 true_lhs = false_lhs = &arg;
4446 true_rhs = &true_value;
4447 false_rhs = &false_value;
/* Split COND into a test plus its two arms; anything that is not a
   COND_EXPR (e.g. a bare comparison) is treated as `cond ? 1 : 0'.  */
4450 if (TREE_CODE (cond) == COND_EXPR)
4452 test = TREE_OPERAND (cond, 0);
4453 true_value = TREE_OPERAND (cond, 1);
4454 false_value = TREE_OPERAND (cond, 2);
4455 /* If this operand throws an expression, then it does not make
4456 sense to try to perform a logical or arithmetic operation
4457 involving it. Instead of building `a + throw 3' for example,
4458 we simply build `a, throw 3'. */
4459 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4461 if (! cond_first_p)
4463 lhs_code = COMPOUND_EXPR;
4464 lhs_type = void_type_node;
4466 else
4467 lhs = true_value;
4469 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4471 if (! cond_first_p)
4473 rhs_code = COMPOUND_EXPR;
4474 rhs_type = void_type_node;
4476 else
4477 rhs = false_value;
4480 else
4482 tree testtype = TREE_TYPE (cond);
4483 test = cond;
4484 true_value = convert (testtype, integer_one_node);
4485 false_value = convert (testtype, integer_zero_node);
4488 /* If ARG is complex we want to make sure we only evaluate
4489 it once. Though this is only required if it is volatile, it
4490 might be more efficient even if it is not. However, if we
4491 succeed in folding one part to a constant, we do not need
4492 to make this SAVE_EXPR. Since we do this optimization
4493 primarily to see if we do end up with constant and this
4494 SAVE_EXPR interferes with later optimizations, suppressing
4495 it when we can is important.
4497 If we are not in a function, we can't make a SAVE_EXPR, so don't
4498 try to do so. Don't try to see if the result is a constant
4499 if an arm is a COND_EXPR since we get exponential behavior
4500 in that case. */
4502 if (TREE_CODE (arg) == SAVE_EXPR)
4503 save = 1;
4504 else if (lhs == 0 && rhs == 0
4505 && !TREE_CONSTANT (arg)
4506 && (*lang_hooks.decls.global_bindings_p) () == 0
4507 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4508 || TREE_SIDE_EFFECTS (arg)))
/* Tentatively fold each arm with ARG unsaved; if neither folds to a
   constant, redo both below with ARG wrapped in a SAVE_EXPR.  */
4510 if (TREE_CODE (true_value) != COND_EXPR)
4511 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4513 if (TREE_CODE (false_value) != COND_EXPR)
4514 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4516 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4517 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4519 arg = save_expr (arg);
4520 lhs = rhs = 0;
4521 save = 1;
4525 if (lhs == 0)
4526 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4527 if (rhs == 0)
4528 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
/* Build the resulting conditional and let fold simplify it.  */
4530 test = fold (build (COND_EXPR, type, test, lhs, rhs));
/* When ARG was wrapped in a SAVE_EXPR, make sure ARG is evaluated
   first; strip_compound_expr removes a COMPOUND_EXPR that was inserted
   merely to evaluate ARG inside TEST, so it is not evaluated twice.  */
4532 if (save)
4533 return build (COMPOUND_EXPR, type,
4534 convert (void_type_node, arg),
4535 strip_compound_expr (test, arg));
4536 else
4537 return convert (type, test);
4541 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4543 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4544 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4545 ADDEND is the same as X.
4547 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4548 and finite. The problematic cases are when X is zero, and its mode
4549 has signed zeros. In the case of rounding towards -infinity,
4550 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4551 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4553 static bool
4554 fold_real_zero_addition_p (type, addend, negate)
4555 tree type, addend;
4556 int negate;
4558 if (!real_zerop (addend))
4559 return false;
4561 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4562 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4563 return true;
4565 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4566 if (TREE_CODE (addend) == REAL_CST
4567 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4568 negate = !negate;
4570 /* The mode has signed zeros, and we have to honor their sign.
4571 In this situation, there is only one case we can return true for.
4572 X - 0 is the same as X unless rounding towards -infinity is
4573 supported. */
4574 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4578 /* Perform constant folding and related simplification of EXPR.
4579 The related simplifications include x*1 => x, x*0 => 0, etc.,
4580 and application of the associative law.
4581 NOP_EXPR conversions may be removed freely (as long as we
4582 are careful not to change the C type of the overall expression)
4583 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4584 but we can constant-fold them if they have constant operands. */
4586 tree
4587 fold (expr)
4588 tree expr;
4590 tree t = expr;
4591 tree t1 = NULL_TREE;
4592 tree tem;
4593 tree type = TREE_TYPE (expr);
4594 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4595 enum tree_code code = TREE_CODE (t);
4596 int kind = TREE_CODE_CLASS (code);
4597 int invert;
4598 /* WINS will be nonzero when the switch is done
4599 if all operands are constant. */
4600 int wins = 1;
4602 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4603 Likewise for a SAVE_EXPR that's already been evaluated. */
4604 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4605 return t;
4607 /* Return right away if a constant. */
4608 if (kind == 'c')
4609 return t;
4611 #ifdef MAX_INTEGER_COMPUTATION_MODE
4612 check_max_integer_computation_mode (expr);
4613 #endif
4615 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4617 tree subop;
4619 /* Special case for conversion ops that can have fixed point args. */
4620 arg0 = TREE_OPERAND (t, 0);
4622 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4623 if (arg0 != 0)
4624 STRIP_SIGN_NOPS (arg0);
4626 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4627 subop = TREE_REALPART (arg0);
4628 else
4629 subop = arg0;
4631 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4632 && TREE_CODE (subop) != REAL_CST
4634 /* Note that TREE_CONSTANT isn't enough:
4635 static var addresses are constant but we can't
4636 do arithmetic on them. */
4637 wins = 0;
4639 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4641 int len = first_rtl_op (code);
4642 int i;
4643 for (i = 0; i < len; i++)
4645 tree op = TREE_OPERAND (t, i);
4646 tree subop;
4648 if (op == 0)
4649 continue; /* Valid for CALL_EXPR, at least. */
4651 if (kind == '<' || code == RSHIFT_EXPR)
4653 /* Signedness matters here. Perhaps we can refine this
4654 later. */
4655 STRIP_SIGN_NOPS (op);
4657 else
4658 /* Strip any conversions that don't change the mode. */
4659 STRIP_NOPS (op);
4661 if (TREE_CODE (op) == COMPLEX_CST)
4662 subop = TREE_REALPART (op);
4663 else
4664 subop = op;
4666 if (TREE_CODE (subop) != INTEGER_CST
4667 && TREE_CODE (subop) != REAL_CST)
4668 /* Note that TREE_CONSTANT isn't enough:
4669 static var addresses are constant but we can't
4670 do arithmetic on them. */
4671 wins = 0;
4673 if (i == 0)
4674 arg0 = op;
4675 else if (i == 1)
4676 arg1 = op;
4680 /* If this is a commutative operation, and ARG0 is a constant, move it
4681 to ARG1 to reduce the number of tests below. */
4682 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4683 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4684 || code == BIT_AND_EXPR)
4685 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4687 tem = arg0; arg0 = arg1; arg1 = tem;
4689 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4690 TREE_OPERAND (t, 1) = tem;
4693 /* Now WINS is set as described above,
4694 ARG0 is the first operand of EXPR,
4695 and ARG1 is the second operand (if it has more than one operand).
4697 First check for cases where an arithmetic operation is applied to a
4698 compound, conditional, or comparison operation. Push the arithmetic
4699 operation inside the compound or conditional to see if any folding
4700 can then be done. Convert comparison to conditional for this purpose.
4701 This also optimizes non-constant cases that used to be done in
4702 expand_expr.
4704 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4705 one of the operands is a comparison and the other is a comparison, a
4706 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4707 code below would make the expression more complex. Change it to a
4708 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4709 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4711 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4712 || code == EQ_EXPR || code == NE_EXPR)
4713 && ((truth_value_p (TREE_CODE (arg0))
4714 && (truth_value_p (TREE_CODE (arg1))
4715 || (TREE_CODE (arg1) == BIT_AND_EXPR
4716 && integer_onep (TREE_OPERAND (arg1, 1)))))
4717 || (truth_value_p (TREE_CODE (arg1))
4718 && (truth_value_p (TREE_CODE (arg0))
4719 || (TREE_CODE (arg0) == BIT_AND_EXPR
4720 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4722 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4723 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4724 : TRUTH_XOR_EXPR,
4725 type, arg0, arg1));
4727 if (code == EQ_EXPR)
4728 t = invert_truthvalue (t);
4730 return t;
4733 if (TREE_CODE_CLASS (code) == '1')
4735 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4736 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4737 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4738 else if (TREE_CODE (arg0) == COND_EXPR)
4740 tree arg01 = TREE_OPERAND (arg0, 1);
4741 tree arg02 = TREE_OPERAND (arg0, 2);
4742 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
4743 arg01 = fold (build1 (code, type, arg01));
4744 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
4745 arg02 = fold (build1 (code, type, arg02));
4746 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4747 arg01, arg02));
4749 /* If this was a conversion, and all we did was to move into
4750 inside the COND_EXPR, bring it back out. But leave it if
4751 it is a conversion from integer to integer and the
4752 result precision is no wider than a word since such a
4753 conversion is cheap and may be optimized away by combine,
4754 while it couldn't if it were outside the COND_EXPR. Then return
4755 so we don't get into an infinite recursion loop taking the
4756 conversion out and then back in. */
4758 if ((code == NOP_EXPR || code == CONVERT_EXPR
4759 || code == NON_LVALUE_EXPR)
4760 && TREE_CODE (t) == COND_EXPR
4761 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4762 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4763 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
4764 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
4765 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4766 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4767 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4768 && (INTEGRAL_TYPE_P
4769 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4770 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4771 t = build1 (code, type,
4772 build (COND_EXPR,
4773 TREE_TYPE (TREE_OPERAND
4774 (TREE_OPERAND (t, 1), 0)),
4775 TREE_OPERAND (t, 0),
4776 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4777 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4778 return t;
4780 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4781 return fold (build (COND_EXPR, type, arg0,
4782 fold (build1 (code, type, integer_one_node)),
4783 fold (build1 (code, type, integer_zero_node))));
4785 else if (TREE_CODE_CLASS (code) == '2'
4786 || TREE_CODE_CLASS (code) == '<')
4788 if (TREE_CODE (arg1) == COMPOUND_EXPR
4789 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
4790 && ! TREE_SIDE_EFFECTS (arg0))
4791 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4792 fold (build (code, type,
4793 arg0, TREE_OPERAND (arg1, 1))));
4794 else if ((TREE_CODE (arg1) == COND_EXPR
4795 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4796 && TREE_CODE_CLASS (code) != '<'))
4797 && (TREE_CODE (arg0) != COND_EXPR
4798 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4799 && (! TREE_SIDE_EFFECTS (arg0)
4800 || ((*lang_hooks.decls.global_bindings_p) () == 0
4801 && ! contains_placeholder_p (arg0))))
4802 return
4803 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4804 /*cond_first_p=*/0);
4805 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4806 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4807 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4808 else if ((TREE_CODE (arg0) == COND_EXPR
4809 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4810 && TREE_CODE_CLASS (code) != '<'))
4811 && (TREE_CODE (arg1) != COND_EXPR
4812 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4813 && (! TREE_SIDE_EFFECTS (arg1)
4814 || ((*lang_hooks.decls.global_bindings_p) () == 0
4815 && ! contains_placeholder_p (arg1))))
4816 return
4817 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4818 /*cond_first_p=*/1);
4820 else if (TREE_CODE_CLASS (code) == '<'
4821 && TREE_CODE (arg0) == COMPOUND_EXPR)
4822 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4823 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4824 else if (TREE_CODE_CLASS (code) == '<'
4825 && TREE_CODE (arg1) == COMPOUND_EXPR)
4826 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4827 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4829 switch (code)
4831 case INTEGER_CST:
4832 case REAL_CST:
4833 case VECTOR_CST:
4834 case STRING_CST:
4835 case COMPLEX_CST:
4836 case CONSTRUCTOR:
4837 return t;
4839 case CONST_DECL:
4840 return fold (DECL_INITIAL (t));
4842 case NOP_EXPR:
4843 case FLOAT_EXPR:
4844 case CONVERT_EXPR:
4845 case FIX_TRUNC_EXPR:
4846 /* Other kinds of FIX are not handled properly by fold_convert. */
4848 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4849 return TREE_OPERAND (t, 0);
4851 /* Handle cases of two conversions in a row. */
4852 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4853 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4855 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4856 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4857 tree final_type = TREE_TYPE (t);
4858 int inside_int = INTEGRAL_TYPE_P (inside_type);
4859 int inside_ptr = POINTER_TYPE_P (inside_type);
4860 int inside_float = FLOAT_TYPE_P (inside_type);
4861 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4862 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4863 int inter_int = INTEGRAL_TYPE_P (inter_type);
4864 int inter_ptr = POINTER_TYPE_P (inter_type);
4865 int inter_float = FLOAT_TYPE_P (inter_type);
4866 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4867 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4868 int final_int = INTEGRAL_TYPE_P (final_type);
4869 int final_ptr = POINTER_TYPE_P (final_type);
4870 int final_float = FLOAT_TYPE_P (final_type);
4871 unsigned int final_prec = TYPE_PRECISION (final_type);
4872 int final_unsignedp = TREE_UNSIGNED (final_type);
4874 /* In addition to the cases of two conversions in a row
4875 handled below, if we are converting something to its own
4876 type via an object of identical or wider precision, neither
4877 conversion is needed. */
4878 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4879 && ((inter_int && final_int) || (inter_float && final_float))
4880 && inter_prec >= final_prec)
4881 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4883 /* Likewise, if the intermediate and final types are either both
4884 float or both integer, we don't need the middle conversion if
4885 it is wider than the final type and doesn't change the signedness
4886 (for integers). Avoid this if the final type is a pointer
4887 since then we sometimes need the inner conversion. Likewise if
4888 the outer has a precision not equal to the size of its mode. */
4889 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4890 || (inter_float && inside_float))
4891 && inter_prec >= inside_prec
4892 && (inter_float || inter_unsignedp == inside_unsignedp)
4893 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4894 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4895 && ! final_ptr)
4896 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4898 /* If we have a sign-extension of a zero-extended value, we can
4899 replace that by a single zero-extension. */
4900 if (inside_int && inter_int && final_int
4901 && inside_prec < inter_prec && inter_prec < final_prec
4902 && inside_unsignedp && !inter_unsignedp)
4903 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4905 /* Two conversions in a row are not needed unless:
4906 - some conversion is floating-point (overstrict for now), or
4907 - the intermediate type is narrower than both initial and
4908 final, or
4909 - the intermediate type and innermost type differ in signedness,
4910 and the outermost type is wider than the intermediate, or
4911 - the initial type is a pointer type and the precisions of the
4912 intermediate and final types differ, or
4913 - the final type is a pointer type and the precisions of the
4914 initial and intermediate types differ. */
4915 if (! inside_float && ! inter_float && ! final_float
4916 && (inter_prec > inside_prec || inter_prec > final_prec)
4917 && ! (inside_int && inter_int
4918 && inter_unsignedp != inside_unsignedp
4919 && inter_prec < final_prec)
4920 && ((inter_unsignedp && inter_prec > inside_prec)
4921 == (final_unsignedp && final_prec > inter_prec))
4922 && ! (inside_ptr && inter_prec != final_prec)
4923 && ! (final_ptr && inside_prec != inter_prec)
4924 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4925 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4926 && ! final_ptr)
4927 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4930 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4931 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4932 /* Detect assigning a bitfield. */
4933 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4934 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4936 /* Don't leave an assignment inside a conversion
4937 unless assigning a bitfield. */
4938 tree prev = TREE_OPERAND (t, 0);
4939 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4940 /* First do the assignment, then return converted constant. */
4941 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4942 TREE_USED (t) = 1;
4943 return t;
4946 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
4947 constants (if x has signed type, the sign bit cannot be set
4948 in c). This folds extension into the BIT_AND_EXPR. */
4949 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
4950 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
4951 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
4952 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
4954 tree and = TREE_OPERAND (t, 0);
4955 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
4956 int change = 0;
4958 if (TREE_UNSIGNED (TREE_TYPE (and))
4959 || (TYPE_PRECISION (TREE_TYPE (t))
4960 <= TYPE_PRECISION (TREE_TYPE (and))))
4961 change = 1;
4962 else if (TYPE_PRECISION (TREE_TYPE (and1))
4963 <= HOST_BITS_PER_WIDE_INT
4964 && host_integerp (and1, 1))
4966 unsigned HOST_WIDE_INT cst;
4968 cst = tree_low_cst (and1, 1);
4969 cst &= (HOST_WIDE_INT) -1
4970 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
4971 change = (cst == 0);
4972 #ifdef LOAD_EXTEND_OP
4973 if (change
4974 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
4975 == ZERO_EXTEND))
4977 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
4978 and0 = convert (uns, and0);
4979 and1 = convert (uns, and1);
4981 #endif
4983 if (change)
4984 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
4985 convert (TREE_TYPE (t), and0),
4986 convert (TREE_TYPE (t), and1)));
4989 if (!wins)
4991 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4992 return t;
4994 return fold_convert (t, arg0);
4996 case VIEW_CONVERT_EXPR:
4997 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4998 return build1 (VIEW_CONVERT_EXPR, type,
4999 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5000 return t;
5002 case COMPONENT_REF:
5003 if (TREE_CODE (arg0) == CONSTRUCTOR)
5005 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5006 if (m)
5007 t = TREE_VALUE (m);
5009 return t;
5011 case RANGE_EXPR:
5012 TREE_CONSTANT (t) = wins;
5013 return t;
5015 case NEGATE_EXPR:
5016 if (wins)
5018 if (TREE_CODE (arg0) == INTEGER_CST)
5020 unsigned HOST_WIDE_INT low;
5021 HOST_WIDE_INT high;
5022 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5023 TREE_INT_CST_HIGH (arg0),
5024 &low, &high);
5025 t = build_int_2 (low, high);
5026 TREE_TYPE (t) = type;
5027 TREE_OVERFLOW (t)
5028 = (TREE_OVERFLOW (arg0)
5029 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5030 TREE_CONSTANT_OVERFLOW (t)
5031 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5033 else if (TREE_CODE (arg0) == REAL_CST)
5034 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5036 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5037 return TREE_OPERAND (arg0, 0);
5039 /* Convert - (a - b) to (b - a) for non-floating-point. */
5040 else if (TREE_CODE (arg0) == MINUS_EXPR
5041 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5042 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5043 TREE_OPERAND (arg0, 0));
5045 return t;
5047 case ABS_EXPR:
5048 if (wins)
5050 if (TREE_CODE (arg0) == INTEGER_CST)
5052 /* If the value is unsigned, then the absolute value is
5053 the same as the ordinary value. */
5054 if (TREE_UNSIGNED (type))
5055 return arg0;
5056 /* Similarly, if the value is non-negative. */
5057 else if (INT_CST_LT (integer_minus_one_node, arg0))
5058 return arg0;
5059 /* If the value is negative, then the absolute value is
5060 its negation. */
5061 else
5063 unsigned HOST_WIDE_INT low;
5064 HOST_WIDE_INT high;
5065 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5066 TREE_INT_CST_HIGH (arg0),
5067 &low, &high);
5068 t = build_int_2 (low, high);
5069 TREE_TYPE (t) = type;
5070 TREE_OVERFLOW (t)
5071 = (TREE_OVERFLOW (arg0)
5072 | force_fit_type (t, overflow));
5073 TREE_CONSTANT_OVERFLOW (t)
5074 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5077 else if (TREE_CODE (arg0) == REAL_CST)
5079 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5080 t = build_real (type,
5081 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5084 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5085 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5086 else
5088 /* fabs(sqrt(x)) = sqrt(x) and fabs(exp(x)) = exp(x). */
5089 enum built_in_function fcode = builtin_mathfn_code (arg0);
5090 if (fcode == BUILT_IN_SQRT
5091 || fcode == BUILT_IN_SQRTF
5092 || fcode == BUILT_IN_SQRTL
5093 || fcode == BUILT_IN_EXP
5094 || fcode == BUILT_IN_EXPF
5095 || fcode == BUILT_IN_EXPL)
5096 t = arg0;
5098 return t;
5100 case CONJ_EXPR:
5101 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5102 return convert (type, arg0);
5103 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5104 return build (COMPLEX_EXPR, type,
5105 TREE_OPERAND (arg0, 0),
5106 negate_expr (TREE_OPERAND (arg0, 1)));
5107 else if (TREE_CODE (arg0) == COMPLEX_CST)
5108 return build_complex (type, TREE_REALPART (arg0),
5109 negate_expr (TREE_IMAGPART (arg0)));
5110 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5111 return fold (build (TREE_CODE (arg0), type,
5112 fold (build1 (CONJ_EXPR, type,
5113 TREE_OPERAND (arg0, 0))),
5114 fold (build1 (CONJ_EXPR,
5115 type, TREE_OPERAND (arg0, 1)))));
5116 else if (TREE_CODE (arg0) == CONJ_EXPR)
5117 return TREE_OPERAND (arg0, 0);
5118 return t;
5120 case BIT_NOT_EXPR:
5121 if (wins)
5123 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5124 ~ TREE_INT_CST_HIGH (arg0));
5125 TREE_TYPE (t) = type;
5126 force_fit_type (t, 0);
5127 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5128 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5130 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5131 return TREE_OPERAND (arg0, 0);
5132 return t;
5134 case PLUS_EXPR:
5135 /* A + (-B) -> A - B */
5136 if (TREE_CODE (arg1) == NEGATE_EXPR)
5137 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5138 /* (-A) + B -> B - A */
5139 if (TREE_CODE (arg0) == NEGATE_EXPR)
5140 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5141 else if (! FLOAT_TYPE_P (type))
5143 if (integer_zerop (arg1))
5144 return non_lvalue (convert (type, arg0));
5146 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5147 with a constant, and the two constants have no bits in common,
5148 we should treat this as a BIT_IOR_EXPR since this may produce more
5149 simplifications. */
5150 if (TREE_CODE (arg0) == BIT_AND_EXPR
5151 && TREE_CODE (arg1) == BIT_AND_EXPR
5152 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5153 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5154 && integer_zerop (const_binop (BIT_AND_EXPR,
5155 TREE_OPERAND (arg0, 1),
5156 TREE_OPERAND (arg1, 1), 0)))
5158 code = BIT_IOR_EXPR;
5159 goto bit_ior;
5162 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5163 (plus (plus (mult) (mult)) (foo)) so that we can
5164 take advantage of the factoring cases below. */
5165 if ((TREE_CODE (arg0) == PLUS_EXPR
5166 && TREE_CODE (arg1) == MULT_EXPR)
5167 || (TREE_CODE (arg1) == PLUS_EXPR
5168 && TREE_CODE (arg0) == MULT_EXPR))
5170 tree parg0, parg1, parg, marg;
5172 if (TREE_CODE (arg0) == PLUS_EXPR)
5173 parg = arg0, marg = arg1;
5174 else
5175 parg = arg1, marg = arg0;
5176 parg0 = TREE_OPERAND (parg, 0);
5177 parg1 = TREE_OPERAND (parg, 1);
5178 STRIP_NOPS (parg0);
5179 STRIP_NOPS (parg1);
5181 if (TREE_CODE (parg0) == MULT_EXPR
5182 && TREE_CODE (parg1) != MULT_EXPR)
5183 return fold (build (PLUS_EXPR, type,
5184 fold (build (PLUS_EXPR, type, parg0, marg)),
5185 parg1));
5186 if (TREE_CODE (parg0) != MULT_EXPR
5187 && TREE_CODE (parg1) == MULT_EXPR)
5188 return fold (build (PLUS_EXPR, type,
5189 fold (build (PLUS_EXPR, type, parg1, marg)),
5190 parg0));
5193 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5195 tree arg00, arg01, arg10, arg11;
5196 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5198 /* (A * C) + (B * C) -> (A+B) * C.
5199 We are most concerned about the case where C is a constant,
5200 but other combinations show up during loop reduction. Since
5201 it is not difficult, try all four possibilities. */
5203 arg00 = TREE_OPERAND (arg0, 0);
5204 arg01 = TREE_OPERAND (arg0, 1);
5205 arg10 = TREE_OPERAND (arg1, 0);
5206 arg11 = TREE_OPERAND (arg1, 1);
5207 same = NULL_TREE;
5209 if (operand_equal_p (arg01, arg11, 0))
5210 same = arg01, alt0 = arg00, alt1 = arg10;
5211 else if (operand_equal_p (arg00, arg10, 0))
5212 same = arg00, alt0 = arg01, alt1 = arg11;
5213 else if (operand_equal_p (arg00, arg11, 0))
5214 same = arg00, alt0 = arg01, alt1 = arg10;
5215 else if (operand_equal_p (arg01, arg10, 0))
5216 same = arg01, alt0 = arg00, alt1 = arg11;
5218 /* No identical multiplicands; see if we can find a common
5219 power-of-two factor in non-power-of-two multiplies. This
5220 can help in multi-dimensional array access. */
5221 else if (TREE_CODE (arg01) == INTEGER_CST
5222 && TREE_CODE (arg11) == INTEGER_CST
5223 && TREE_INT_CST_HIGH (arg01) == 0
5224 && TREE_INT_CST_HIGH (arg11) == 0)
5226 HOST_WIDE_INT int01, int11, tmp;
5227 int01 = TREE_INT_CST_LOW (arg01);
5228 int11 = TREE_INT_CST_LOW (arg11);
5230 /* Move min of absolute values to int11. */
5231 if ((int01 >= 0 ? int01 : -int01)
5232 < (int11 >= 0 ? int11 : -int11))
5234 tmp = int01, int01 = int11, int11 = tmp;
5235 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5236 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5239 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5241 alt0 = fold (build (MULT_EXPR, type, arg00,
5242 build_int_2 (int01 / int11, 0)));
5243 alt1 = arg10;
5244 same = arg11;
5248 if (same)
5249 return fold (build (MULT_EXPR, type,
5250 fold (build (PLUS_EXPR, type, alt0, alt1)),
5251 same));
5255 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5256 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5257 return non_lvalue (convert (type, arg0));
5259 /* Likewise if the operands are reversed. */
5260 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5261 return non_lvalue (convert (type, arg1));
5263 bit_rotate:
5264 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5265 is a rotate of A by C1 bits. */
5266 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5267 is a rotate of A by B bits. */
5269 enum tree_code code0, code1;
5270 code0 = TREE_CODE (arg0);
5271 code1 = TREE_CODE (arg1);
5272 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5273 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5274 && operand_equal_p (TREE_OPERAND (arg0, 0),
5275 TREE_OPERAND (arg1, 0), 0)
5276 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5278 tree tree01, tree11;
5279 enum tree_code code01, code11;
5281 tree01 = TREE_OPERAND (arg0, 1);
5282 tree11 = TREE_OPERAND (arg1, 1);
5283 STRIP_NOPS (tree01);
5284 STRIP_NOPS (tree11);
5285 code01 = TREE_CODE (tree01);
5286 code11 = TREE_CODE (tree11);
5287 if (code01 == INTEGER_CST
5288 && code11 == INTEGER_CST
5289 && TREE_INT_CST_HIGH (tree01) == 0
5290 && TREE_INT_CST_HIGH (tree11) == 0
5291 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5292 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5293 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5294 code0 == LSHIFT_EXPR ? tree01 : tree11);
5295 else if (code11 == MINUS_EXPR)
5297 tree tree110, tree111;
5298 tree110 = TREE_OPERAND (tree11, 0);
5299 tree111 = TREE_OPERAND (tree11, 1);
5300 STRIP_NOPS (tree110);
5301 STRIP_NOPS (tree111);
5302 if (TREE_CODE (tree110) == INTEGER_CST
5303 && 0 == compare_tree_int (tree110,
5304 TYPE_PRECISION
5305 (TREE_TYPE (TREE_OPERAND
5306 (arg0, 0))))
5307 && operand_equal_p (tree01, tree111, 0))
5308 return build ((code0 == LSHIFT_EXPR
5309 ? LROTATE_EXPR
5310 : RROTATE_EXPR),
5311 type, TREE_OPERAND (arg0, 0), tree01);
5313 else if (code01 == MINUS_EXPR)
5315 tree tree010, tree011;
5316 tree010 = TREE_OPERAND (tree01, 0);
5317 tree011 = TREE_OPERAND (tree01, 1);
5318 STRIP_NOPS (tree010);
5319 STRIP_NOPS (tree011);
5320 if (TREE_CODE (tree010) == INTEGER_CST
5321 && 0 == compare_tree_int (tree010,
5322 TYPE_PRECISION
5323 (TREE_TYPE (TREE_OPERAND
5324 (arg0, 0))))
5325 && operand_equal_p (tree11, tree011, 0))
5326 return build ((code0 != LSHIFT_EXPR
5327 ? LROTATE_EXPR
5328 : RROTATE_EXPR),
5329 type, TREE_OPERAND (arg0, 0), tree11);
5334 associate:
5335 /* In most languages, can't associate operations on floats through
5336 parentheses. Rather than remember where the parentheses were, we
5337 don't associate floats at all. It shouldn't matter much. However,
5338 associating multiplications is only very slightly inaccurate, so do
5339 that if -funsafe-math-optimizations is specified. */
5341 if (! wins
5342 && (! FLOAT_TYPE_P (type)
5343 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5345 tree var0, con0, lit0, minus_lit0;
5346 tree var1, con1, lit1, minus_lit1;
5348 /* Split both trees into variables, constants, and literals. Then
5349 associate each group together, the constants with literals,
5350 then the result with variables. This increases the chances of
5351 literals being recombined later and of generating relocatable
5352 expressions for the sum of a constant and literal. */
5353 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5354 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5355 code == MINUS_EXPR);
5357 /* Only do something if we found more than two objects. Otherwise,
5358 nothing has changed and we risk infinite recursion. */
5359 if (2 < ((var0 != 0) + (var1 != 0)
5360 + (con0 != 0) + (con1 != 0)
5361 + (lit0 != 0) + (lit1 != 0)
5362 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5364 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5365 if (code == MINUS_EXPR)
5366 code = PLUS_EXPR;
5368 var0 = associate_trees (var0, var1, code, type);
5369 con0 = associate_trees (con0, con1, code, type);
5370 lit0 = associate_trees (lit0, lit1, code, type);
5371 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5373 /* Preserve the MINUS_EXPR if the negative part of the literal is
5374 greater than the positive part. Otherwise, the multiplicative
5375 folding code (i.e extract_muldiv) may be fooled in case
5376 unsigned constants are substracted, like in the following
5377 example: ((X*2 + 4) - 8U)/2. */
5378 if (minus_lit0 && lit0)
5380 if (tree_int_cst_lt (lit0, minus_lit0))
5382 minus_lit0 = associate_trees (minus_lit0, lit0,
5383 MINUS_EXPR, type);
5384 lit0 = 0;
5386 else
5388 lit0 = associate_trees (lit0, minus_lit0,
5389 MINUS_EXPR, type);
5390 minus_lit0 = 0;
5393 if (minus_lit0)
5395 if (con0 == 0)
5396 return convert (type, associate_trees (var0, minus_lit0,
5397 MINUS_EXPR, type));
5398 else
5400 con0 = associate_trees (con0, minus_lit0,
5401 MINUS_EXPR, type);
5402 return convert (type, associate_trees (var0, con0,
5403 PLUS_EXPR, type));
5407 con0 = associate_trees (con0, lit0, code, type);
5408 return convert (type, associate_trees (var0, con0, code, type));
5412 binary:
5413 if (wins)
5414 t1 = const_binop (code, arg0, arg1, 0);
5415 if (t1 != NULL_TREE)
5417 /* The return value should always have
5418 the same type as the original expression. */
5419 if (TREE_TYPE (t1) != TREE_TYPE (t))
5420 t1 = convert (TREE_TYPE (t), t1);
5422 return t1;
5424 return t;
5426 case MINUS_EXPR:
5427 /* A - (-B) -> A + B */
5428 if (TREE_CODE (arg1) == NEGATE_EXPR)
5429 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5430 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5431 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5432 return
5433 fold (build (MINUS_EXPR, type,
5434 build_real (TREE_TYPE (arg1),
5435 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5436 TREE_OPERAND (arg0, 0)));
5438 if (! FLOAT_TYPE_P (type))
5440 if (! wins && integer_zerop (arg0))
5441 return negate_expr (convert (type, arg1));
5442 if (integer_zerop (arg1))
5443 return non_lvalue (convert (type, arg0));
5445 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5446 about the case where C is a constant, just try one of the
5447 four possibilities. */
5449 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5450 && operand_equal_p (TREE_OPERAND (arg0, 1),
5451 TREE_OPERAND (arg1, 1), 0))
5452 return fold (build (MULT_EXPR, type,
5453 fold (build (MINUS_EXPR, type,
5454 TREE_OPERAND (arg0, 0),
5455 TREE_OPERAND (arg1, 0))),
5456 TREE_OPERAND (arg0, 1)));
5459 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5460 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5461 return non_lvalue (convert (type, arg0));
5463 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
5464 ARG0 is zero and X + ARG0 reduces to X, since that would mean
5465 (-ARG1 + ARG0) reduces to -ARG1. */
5466 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5467 return negate_expr (convert (type, arg1));
5469 /* Fold &x - &x. This can happen from &x.foo - &x.
5470 This is unsafe for certain floats even in non-IEEE formats.
5471 In IEEE, it is unsafe because it does wrong for NaNs.
5472 Also note that operand_equal_p is always false if an operand
5473 is volatile. */
5475 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5476 && operand_equal_p (arg0, arg1, 0))
5477 return convert (type, integer_zero_node);
5479 goto associate;
5481 case MULT_EXPR:
5482 /* (-A) * (-B) -> A * B */
5483 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5484 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5485 TREE_OPERAND (arg1, 0)));
5487 if (! FLOAT_TYPE_P (type))
5489 if (integer_zerop (arg1))
5490 return omit_one_operand (type, arg1, arg0);
5491 if (integer_onep (arg1))
5492 return non_lvalue (convert (type, arg0));
5494 /* (a * (1 << b)) is (a << b) */
5495 if (TREE_CODE (arg1) == LSHIFT_EXPR
5496 && integer_onep (TREE_OPERAND (arg1, 0)))
5497 return fold (build (LSHIFT_EXPR, type, arg0,
5498 TREE_OPERAND (arg1, 1)));
5499 if (TREE_CODE (arg0) == LSHIFT_EXPR
5500 && integer_onep (TREE_OPERAND (arg0, 0)))
5501 return fold (build (LSHIFT_EXPR, type, arg1,
5502 TREE_OPERAND (arg0, 1)));
5504 if (TREE_CODE (arg1) == INTEGER_CST
5505 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5506 code, NULL_TREE)))
5507 return convert (type, tem);
5510 else
5512 /* Maybe fold x * 0 to 0. The expressions aren't the same
5513 when x is NaN, since x * 0 is also NaN. Nor are they the
5514 same in modes with signed zeros, since multiplying a
5515 negative value by 0 gives -0, not +0. */
5516 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
5517 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
5518 && real_zerop (arg1))
5519 return omit_one_operand (type, arg1, arg0);
5520 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
5521 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5522 && real_onep (arg1))
5523 return non_lvalue (convert (type, arg0));
5525 /* Transform x * -1.0 into -x. */
5526 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5527 && real_minus_onep (arg1))
5528 return fold (build1 (NEGATE_EXPR, type, arg0));
5530 /* x*2 is x+x */
5531 if (! wins && real_twop (arg1)
5532 && (*lang_hooks.decls.global_bindings_p) () == 0
5533 && ! contains_placeholder_p (arg0))
5535 tree arg = save_expr (arg0);
5536 return build (PLUS_EXPR, type, arg, arg);
5539 if (flag_unsafe_math_optimizations)
5541 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
5542 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
5544 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
5545 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
5546 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
5547 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
5549 tree sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5550 tree arg = build (MULT_EXPR, type,
5551 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5552 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5553 tree arglist = build_tree_list (NULL_TREE, arg);
5554 return fold (build_function_call_expr (sqrtfn, arglist));
5557 /* Optimize exp(x)*exp(y) as exp(x+y). */
5558 if ((fcode0 == BUILT_IN_EXP && fcode1 == BUILT_IN_EXP)
5559 || (fcode0 == BUILT_IN_EXPF && fcode1 == BUILT_IN_EXPF)
5560 || (fcode0 == BUILT_IN_EXPL && fcode1 == BUILT_IN_EXPL))
5562 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5563 tree arg = build (PLUS_EXPR, type,
5564 TREE_VALUE (TREE_OPERAND (arg0, 1)),
5565 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5566 tree arglist = build_tree_list (NULL_TREE, arg);
5567 return fold (build_function_call_expr (expfn, arglist));
5571 goto associate;
5573 case BIT_IOR_EXPR:
5574 bit_ior:
5575 if (integer_all_onesp (arg1))
5576 return omit_one_operand (type, arg1, arg0);
5577 if (integer_zerop (arg1))
5578 return non_lvalue (convert (type, arg0));
5579 t1 = distribute_bit_expr (code, type, arg0, arg1);
5580 if (t1 != NULL_TREE)
5581 return t1;
5583 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5585 This results in more efficient code for machines without a NAND
5586 instruction. Combine will canonicalize to the first form
5587 which will allow use of NAND instructions provided by the
5588 backend if they exist. */
5589 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5590 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5592 return fold (build1 (BIT_NOT_EXPR, type,
5593 build (BIT_AND_EXPR, type,
5594 TREE_OPERAND (arg0, 0),
5595 TREE_OPERAND (arg1, 0))));
5598 /* See if this can be simplified into a rotate first. If that
5599 is unsuccessful continue in the association code. */
5600 goto bit_rotate;
5602 case BIT_XOR_EXPR:
5603 if (integer_zerop (arg1))
5604 return non_lvalue (convert (type, arg0));
5605 if (integer_all_onesp (arg1))
5606 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5608 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5609 with a constant, and the two constants have no bits in common,
5610 we should treat this as a BIT_IOR_EXPR since this may produce more
5611 simplifications. */
5612 if (TREE_CODE (arg0) == BIT_AND_EXPR
5613 && TREE_CODE (arg1) == BIT_AND_EXPR
5614 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5615 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5616 && integer_zerop (const_binop (BIT_AND_EXPR,
5617 TREE_OPERAND (arg0, 1),
5618 TREE_OPERAND (arg1, 1), 0)))
5620 code = BIT_IOR_EXPR;
5621 goto bit_ior;
5624 /* See if this can be simplified into a rotate first. If that
5625 is unsuccessful continue in the association code. */
5626 goto bit_rotate;
5628 case BIT_AND_EXPR:
5629 bit_and:
5630 if (integer_all_onesp (arg1))
5631 return non_lvalue (convert (type, arg0));
5632 if (integer_zerop (arg1))
5633 return omit_one_operand (type, arg1, arg0);
5634 t1 = distribute_bit_expr (code, type, arg0, arg1);
5635 if (t1 != NULL_TREE)
5636 return t1;
5637 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5638 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5639 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5641 unsigned int prec
5642 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5644 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5645 && (~TREE_INT_CST_LOW (arg1)
5646 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5647 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5650 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5652 This results in more efficient code for machines without a NOR
5653 instruction. Combine will canonicalize to the first form
5654 which will allow use of NOR instructions provided by the
5655 backend if they exist. */
5656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5657 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5659 return fold (build1 (BIT_NOT_EXPR, type,
5660 build (BIT_IOR_EXPR, type,
5661 TREE_OPERAND (arg0, 0),
5662 TREE_OPERAND (arg1, 0))));
5665 goto associate;
5667 case BIT_ANDTC_EXPR:
5668 if (integer_all_onesp (arg0))
5669 return non_lvalue (convert (type, arg1));
5670 if (integer_zerop (arg0))
5671 return omit_one_operand (type, arg0, arg1);
5672 if (TREE_CODE (arg1) == INTEGER_CST)
5674 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5675 code = BIT_AND_EXPR;
5676 goto bit_and;
5678 goto binary;
5680 case RDIV_EXPR:
5681 /* Don't touch a floating-point divide by zero unless the mode
5682 of the constant can represent infinity. */
5683 if (TREE_CODE (arg1) == REAL_CST
5684 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
5685 && real_zerop (arg1))
5686 return t;
5688 /* (-A) / (-B) -> A / B */
5689 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5690 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5691 TREE_OPERAND (arg1, 0)));
5693 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
5694 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
5695 && real_onep (arg1))
5696 return non_lvalue (convert (type, arg0));
5698 /* If ARG1 is a constant, we can convert this to a multiply by the
5699 reciprocal. This does not have the same rounding properties,
5700 so only do this if -funsafe-math-optimizations. We can actually
5701 always safely do it if ARG1 is a power of two, but it's hard to
5702 tell if it is or not in a portable manner. */
5703 if (TREE_CODE (arg1) == REAL_CST)
5705 if (flag_unsafe_math_optimizations
5706 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5707 arg1, 0)))
5708 return fold (build (MULT_EXPR, type, arg0, tem));
5709 /* Find the reciprocal if optimizing and the result is exact. */
5710 else if (optimize)
5712 REAL_VALUE_TYPE r;
5713 r = TREE_REAL_CST (arg1);
5714 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5716 tem = build_real (type, r);
5717 return fold (build (MULT_EXPR, type, arg0, tem));
5721 /* Convert A/B/C to A/(B*C). */
5722 if (flag_unsafe_math_optimizations
5723 && TREE_CODE (arg0) == RDIV_EXPR)
5725 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5726 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5727 arg1)));
5729 /* Convert A/(B/C) to (A/B)*C. */
5730 if (flag_unsafe_math_optimizations
5731 && TREE_CODE (arg1) == RDIV_EXPR)
5733 return fold (build (MULT_EXPR, type,
5734 build (RDIV_EXPR, type, arg0,
5735 TREE_OPERAND (arg1, 0)),
5736 TREE_OPERAND (arg1, 1)));
5739 /* Optimize x/exp(y) into x*exp(-y). */
5740 if (flag_unsafe_math_optimizations)
5742 enum built_in_function fcode = builtin_mathfn_code (arg1);
5743 if (fcode == BUILT_IN_EXP
5744 || fcode == BUILT_IN_EXPF
5745 || fcode == BUILT_IN_EXPL)
5747 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
5748 tree arg = build1 (NEGATE_EXPR, type,
5749 TREE_VALUE (TREE_OPERAND (arg1, 1)));
5750 tree arglist = build_tree_list (NULL_TREE, arg);
5751 arg1 = build_function_call_expr (expfn, arglist);
5752 return fold (build (MULT_EXPR, type, arg0, arg1));
5755 goto binary;
5757 case TRUNC_DIV_EXPR:
5758 case ROUND_DIV_EXPR:
5759 case FLOOR_DIV_EXPR:
5760 case CEIL_DIV_EXPR:
5761 case EXACT_DIV_EXPR:
5762 if (integer_onep (arg1))
5763 return non_lvalue (convert (type, arg0));
5764 if (integer_zerop (arg1))
5765 return t;
5767 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5768 operation, EXACT_DIV_EXPR.
5770 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5771 At one time others generated faster code, it's not clear if they do
5772 after the last round to changes to the DIV code in expmed.c. */
5773 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5774 && multiple_of_p (type, arg0, arg1))
5775 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5777 if (TREE_CODE (arg1) == INTEGER_CST
5778 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5779 code, NULL_TREE)))
5780 return convert (type, tem);
5782 goto binary;
5784 case CEIL_MOD_EXPR:
5785 case FLOOR_MOD_EXPR:
5786 case ROUND_MOD_EXPR:
5787 case TRUNC_MOD_EXPR:
5788 if (integer_onep (arg1))
5789 return omit_one_operand (type, integer_zero_node, arg0);
5790 if (integer_zerop (arg1))
5791 return t;
5793 if (TREE_CODE (arg1) == INTEGER_CST
5794 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5795 code, NULL_TREE)))
5796 return convert (type, tem);
5798 goto binary;
5800 case LROTATE_EXPR:
5801 case RROTATE_EXPR:
5802 if (integer_all_onesp (arg0))
5803 return omit_one_operand (type, arg0, arg1);
5804 goto shift;
5806 case RSHIFT_EXPR:
5807 /* Optimize -1 >> x for arithmetic right shifts. */
5808 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
5809 return omit_one_operand (type, arg0, arg1);
5810 /* ... fall through ... */
5812 case LSHIFT_EXPR:
5813 shift:
5814 if (integer_zerop (arg1))
5815 return non_lvalue (convert (type, arg0));
5816 if (integer_zerop (arg0))
5817 return omit_one_operand (type, arg0, arg1);
5819 /* Since negative shift count is not well-defined,
5820 don't try to compute it in the compiler. */
5821 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5822 return t;
5823 /* Rewrite an LROTATE_EXPR by a constant into an
5824 RROTATE_EXPR by a new constant. */
5825 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5827 TREE_SET_CODE (t, RROTATE_EXPR);
5828 code = RROTATE_EXPR;
5829 TREE_OPERAND (t, 1) = arg1
5830 = const_binop
5831 (MINUS_EXPR,
5832 convert (TREE_TYPE (arg1),
5833 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5834 arg1, 0);
5835 if (tree_int_cst_sgn (arg1) < 0)
5836 return t;
5839 /* If we have a rotate of a bit operation with the rotate count and
5840 the second operand of the bit operation both constant,
5841 permute the two operations. */
5842 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5843 && (TREE_CODE (arg0) == BIT_AND_EXPR
5844 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5845 || TREE_CODE (arg0) == BIT_IOR_EXPR
5846 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5847 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5848 return fold (build (TREE_CODE (arg0), type,
5849 fold (build (code, type,
5850 TREE_OPERAND (arg0, 0), arg1)),
5851 fold (build (code, type,
5852 TREE_OPERAND (arg0, 1), arg1))));
5854 /* Two consecutive rotates adding up to the width of the mode can
5855 be ignored. */
5856 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5857 && TREE_CODE (arg0) == RROTATE_EXPR
5858 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5859 && TREE_INT_CST_HIGH (arg1) == 0
5860 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5861 && ((TREE_INT_CST_LOW (arg1)
5862 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5863 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5864 return TREE_OPERAND (arg0, 0);
5866 goto binary;
5868 case MIN_EXPR:
5869 if (operand_equal_p (arg0, arg1, 0))
5870 return omit_one_operand (type, arg0, arg1);
5871 if (INTEGRAL_TYPE_P (type)
5872 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5873 return omit_one_operand (type, arg1, arg0);
5874 goto associate;
5876 case MAX_EXPR:
5877 if (operand_equal_p (arg0, arg1, 0))
5878 return omit_one_operand (type, arg0, arg1);
5879 if (INTEGRAL_TYPE_P (type)
5880 && TYPE_MAX_VALUE (type)
5881 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5882 return omit_one_operand (type, arg1, arg0);
5883 goto associate;
5885 case TRUTH_NOT_EXPR:
5886 /* Note that the operand of this must be an int
5887 and its values must be 0 or 1.
5888 ("true" is a fixed value perhaps depending on the language,
5889 but we don't handle values other than 1 correctly yet.) */
5890 tem = invert_truthvalue (arg0);
5891 /* Avoid infinite recursion. */
5892 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5893 return t;
5894 return convert (type, tem);
5896 case TRUTH_ANDIF_EXPR:
5897 /* Note that the operands of this must be ints
5898 and their values must be 0 or 1.
5899 ("true" is a fixed value perhaps depending on the language.) */
5900 /* If first arg is constant zero, return it. */
5901 if (integer_zerop (arg0))
5902 return convert (type, arg0);
5903 case TRUTH_AND_EXPR:
5904 /* If either arg is constant true, drop it. */
5905 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5906 return non_lvalue (convert (type, arg1));
5907 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5908 /* Preserve sequence points. */
5909 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5910 return non_lvalue (convert (type, arg0));
5911 /* If second arg is constant zero, result is zero, but first arg
5912 must be evaluated. */
5913 if (integer_zerop (arg1))
5914 return omit_one_operand (type, arg1, arg0);
5915 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5916 case will be handled here. */
5917 if (integer_zerop (arg0))
5918 return omit_one_operand (type, arg0, arg1);
5920 truth_andor:
5921 /* We only do these simplifications if we are optimizing. */
5922 if (!optimize)
5923 return t;
5925 /* Check for things like (A || B) && (A || C). We can convert this
5926 to A || (B && C). Note that either operator can be any of the four
5927 truth and/or operations and the transformation will still be
5928 valid. Also note that we only care about order for the
5929 ANDIF and ORIF operators. If B contains side effects, this
5930 might change the truth-value of A. */
5931 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5932 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5933 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5934 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5935 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5936 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5938 tree a00 = TREE_OPERAND (arg0, 0);
5939 tree a01 = TREE_OPERAND (arg0, 1);
5940 tree a10 = TREE_OPERAND (arg1, 0);
5941 tree a11 = TREE_OPERAND (arg1, 1);
5942 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5943 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5944 && (code == TRUTH_AND_EXPR
5945 || code == TRUTH_OR_EXPR));
5947 if (operand_equal_p (a00, a10, 0))
5948 return fold (build (TREE_CODE (arg0), type, a00,
5949 fold (build (code, type, a01, a11))));
5950 else if (commutative && operand_equal_p (a00, a11, 0))
5951 return fold (build (TREE_CODE (arg0), type, a00,
5952 fold (build (code, type, a01, a10))));
5953 else if (commutative && operand_equal_p (a01, a10, 0))
5954 return fold (build (TREE_CODE (arg0), type, a01,
5955 fold (build (code, type, a00, a11))));
5957 /* This case if tricky because we must either have commutative
5958 operators or else A10 must not have side-effects. */
5960 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5961 && operand_equal_p (a01, a11, 0))
5962 return fold (build (TREE_CODE (arg0), type,
5963 fold (build (code, type, a00, a10)),
5964 a01));
5967 /* See if we can build a range comparison. */
5968 if (0 != (tem = fold_range_test (t)))
5969 return tem;
5971 /* Check for the possibility of merging component references. If our
5972 lhs is another similar operation, try to merge its rhs with our
5973 rhs. Then try to merge our lhs and rhs. */
5974 if (TREE_CODE (arg0) == code
5975 && 0 != (tem = fold_truthop (code, type,
5976 TREE_OPERAND (arg0, 1), arg1)))
5977 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5979 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5980 return tem;
5982 return t;
5984 case TRUTH_ORIF_EXPR:
5985 /* Note that the operands of this must be ints
5986 and their values must be 0 or true.
5987 ("true" is a fixed value perhaps depending on the language.) */
5988 /* If first arg is constant true, return it. */
5989 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5990 return convert (type, arg0);
5991 case TRUTH_OR_EXPR:
5992 /* If either arg is constant zero, drop it. */
5993 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5994 return non_lvalue (convert (type, arg1));
5995 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5996 /* Preserve sequence points. */
5997 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5998 return non_lvalue (convert (type, arg0));
5999 /* If second arg is constant true, result is true, but we must
6000 evaluate first arg. */
6001 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6002 return omit_one_operand (type, arg1, arg0);
6003 /* Likewise for first arg, but note this only occurs here for
6004 TRUTH_OR_EXPR. */
6005 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6006 return omit_one_operand (type, arg0, arg1);
6007 goto truth_andor;
6009 case TRUTH_XOR_EXPR:
6010 /* If either arg is constant zero, drop it. */
6011 if (integer_zerop (arg0))
6012 return non_lvalue (convert (type, arg1));
6013 if (integer_zerop (arg1))
6014 return non_lvalue (convert (type, arg0));
6015 /* If either arg is constant true, this is a logical inversion. */
6016 if (integer_onep (arg0))
6017 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6018 if (integer_onep (arg1))
6019 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6020 return t;
6022 case EQ_EXPR:
6023 case NE_EXPR:
6024 case LT_EXPR:
6025 case GT_EXPR:
6026 case LE_EXPR:
6027 case GE_EXPR:
 6028 /* If one arg is a real or integer constant, put it last.
 NOTE(review): the REAL_CST clause of the condition below tests
 TREE_CODE (arg0) twice; the second operand should almost certainly
 be arg1.  As written, (arg0 == REAL_CST && arg0 != REAL_CST) is
 always false, so the swap never fires for real constants. */
6029 if ((TREE_CODE (arg0) == INTEGER_CST
6030 && TREE_CODE (arg1) != INTEGER_CST)
6031 || (TREE_CODE (arg0) == REAL_CST
6032 && TREE_CODE (arg0) != REAL_CST))
6034 TREE_OPERAND (t, 0) = arg1;
6035 TREE_OPERAND (t, 1) = arg0;
6036 arg0 = TREE_OPERAND (t, 0);
6037 arg1 = TREE_OPERAND (t, 1);
6038 code = swap_tree_comparison (code);
6039 TREE_SET_CODE (t, code);
6042 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6044 /* (-a) CMP (-b) -> b CMP a */
6045 if (TREE_CODE (arg0) == NEGATE_EXPR
6046 && TREE_CODE (arg1) == NEGATE_EXPR)
6047 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6048 TREE_OPERAND (arg0, 0)));
6049 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6050 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6051 return
6052 fold (build
6053 (swap_tree_comparison (code), type,
6054 TREE_OPERAND (arg0, 0),
6055 build_real (TREE_TYPE (arg1),
6056 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6057 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6058 /* a CMP (-0) -> a CMP 0 */
6059 if (TREE_CODE (arg1) == REAL_CST
6060 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6061 return fold (build (code, type, arg0,
6062 build_real (TREE_TYPE (arg1), dconst0)));
6064 /* If this is a comparison of a real constant with a PLUS_EXPR
6065 or a MINUS_EXPR of a real constant, we can convert it into a
6066 comparison with a revised real constant as long as no overflow
6067 occurs when unsafe_math_optimizations are enabled. */
6068 if (flag_unsafe_math_optimizations
6069 && TREE_CODE (arg1) == REAL_CST
6070 && (TREE_CODE (arg0) == PLUS_EXPR
6071 || TREE_CODE (arg0) == MINUS_EXPR)
6072 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6073 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6074 ? MINUS_EXPR : PLUS_EXPR,
6075 arg1, TREE_OPERAND (arg0, 1), 0))
6076 && ! TREE_CONSTANT_OVERFLOW (tem))
6077 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6080 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6081 First, see if one arg is constant; find the constant arg
6082 and the other one. */
6084 tree constop = 0, varop = NULL_TREE;
6085 int constopnum = -1;
6087 if (TREE_CONSTANT (arg1))
6088 constopnum = 1, constop = arg1, varop = arg0;
6089 if (TREE_CONSTANT (arg0))
6090 constopnum = 0, constop = arg0, varop = arg1;
6092 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6094 /* This optimization is invalid for ordered comparisons
6095 if CONST+INCR overflows or if foo+incr might overflow.
6096 This optimization is invalid for floating point due to rounding.
6097 For pointer types we assume overflow doesn't happen. */
6098 if (POINTER_TYPE_P (TREE_TYPE (varop))
6099 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6100 && (code == EQ_EXPR || code == NE_EXPR)))
6102 tree newconst
6103 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6104 constop, TREE_OPERAND (varop, 1)));
6106 /* Do not overwrite the current varop to be a preincrement,
6107 create a new node so that we won't confuse our caller who
6108 might create trees and throw them away, reusing the
6109 arguments that they passed to build. This shows up in
6110 the THEN or ELSE parts of ?: being postincrements. */
6111 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
6112 TREE_OPERAND (varop, 0),
6113 TREE_OPERAND (varop, 1));
6115 /* If VAROP is a reference to a bitfield, we must mask
6116 the constant by the width of the field. */
6117 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6118 && DECL_BIT_FIELD(TREE_OPERAND
6119 (TREE_OPERAND (varop, 0), 1)))
6121 int size
6122 = TREE_INT_CST_LOW (DECL_SIZE
6123 (TREE_OPERAND
6124 (TREE_OPERAND (varop, 0), 1)));
6125 tree mask, unsigned_type;
6126 unsigned int precision;
6127 tree folded_compare;
6129 /* First check whether the comparison would come out
6130 always the same. If we don't do that we would
6131 change the meaning with the masking. */
6132 if (constopnum == 0)
6133 folded_compare = fold (build (code, type, constop,
6134 TREE_OPERAND (varop, 0)));
6135 else
6136 folded_compare = fold (build (code, type,
6137 TREE_OPERAND (varop, 0),
6138 constop));
6139 if (integer_zerop (folded_compare)
6140 || integer_onep (folded_compare))
6141 return omit_one_operand (type, folded_compare, varop);
6143 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6144 precision = TYPE_PRECISION (unsigned_type);
6145 mask = build_int_2 (~0, ~0);
6146 TREE_TYPE (mask) = unsigned_type;
6147 force_fit_type (mask, 0);
6148 mask = const_binop (RSHIFT_EXPR, mask,
6149 size_int (precision - size), 0);
6150 newconst = fold (build (BIT_AND_EXPR,
6151 TREE_TYPE (varop), newconst,
6152 convert (TREE_TYPE (varop),
6153 mask)));
6156 t = build (code, type,
6157 (constopnum == 0) ? newconst : varop,
6158 (constopnum == 1) ? newconst : varop);
6159 return t;
6162 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6164 if (POINTER_TYPE_P (TREE_TYPE (varop))
6165 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6166 && (code == EQ_EXPR || code == NE_EXPR)))
6168 tree newconst
6169 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6170 constop, TREE_OPERAND (varop, 1)));
6172 /* Do not overwrite the current varop to be a predecrement,
6173 create a new node so that we won't confuse our caller who
6174 might create trees and throw them away, reusing the
6175 arguments that they passed to build. This shows up in
6176 the THEN or ELSE parts of ?: being postdecrements. */
6177 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
6178 TREE_OPERAND (varop, 0),
6179 TREE_OPERAND (varop, 1));
6181 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6182 && DECL_BIT_FIELD(TREE_OPERAND
6183 (TREE_OPERAND (varop, 0), 1)))
6185 int size
6186 = TREE_INT_CST_LOW (DECL_SIZE
6187 (TREE_OPERAND
6188 (TREE_OPERAND (varop, 0), 1)));
6189 tree mask, unsigned_type;
6190 unsigned int precision;
6191 tree folded_compare;
6193 if (constopnum == 0)
6194 folded_compare = fold (build (code, type, constop,
6195 TREE_OPERAND (varop, 0)));
6196 else
6197 folded_compare = fold (build (code, type,
6198 TREE_OPERAND (varop, 0),
6199 constop));
6200 if (integer_zerop (folded_compare)
6201 || integer_onep (folded_compare))
6202 return omit_one_operand (type, folded_compare, varop);
6204 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
6205 precision = TYPE_PRECISION (unsigned_type);
6206 mask = build_int_2 (~0, ~0);
6207 TREE_TYPE (mask) = TREE_TYPE (varop);
6208 force_fit_type (mask, 0);
6209 mask = const_binop (RSHIFT_EXPR, mask,
6210 size_int (precision - size), 0);
6211 newconst = fold (build (BIT_AND_EXPR,
6212 TREE_TYPE (varop), newconst,
6213 convert (TREE_TYPE (varop),
6214 mask)));
6217 t = build (code, type,
6218 (constopnum == 0) ? newconst : varop,
6219 (constopnum == 1) ? newconst : varop);
6220 return t;
6225 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
6226 This transformation affects the cases which are handled in later
6227 optimizations involving comparisons with non-negative constants. */
6228 if (TREE_CODE (arg1) == INTEGER_CST
6229 && TREE_CODE (arg0) != INTEGER_CST
6230 && tree_int_cst_sgn (arg1) > 0)
6232 switch (code)
6234 case GE_EXPR:
6235 code = GT_EXPR;
6236 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6237 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6238 break;
6240 case LT_EXPR:
6241 code = LE_EXPR;
6242 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6243 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6244 break;
6246 default:
6247 break;
6251 /* Comparisons with the highest or lowest possible integer of
6252 the specified size will have known values. */
6254 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6256 if (TREE_CODE (arg1) == INTEGER_CST
6257 && ! TREE_CONSTANT_OVERFLOW (arg1)
6258 && width <= HOST_BITS_PER_WIDE_INT
6259 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6260 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6262 unsigned HOST_WIDE_INT signed_max;
6263 unsigned HOST_WIDE_INT max, min;
6265 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
6267 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6269 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
6270 min = 0;
6272 else
6274 max = signed_max;
6275 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
6278 if (TREE_INT_CST_HIGH (arg1) == 0
6279 && TREE_INT_CST_LOW (arg1) == max)
6280 switch (code)
6282 case GT_EXPR:
6283 return omit_one_operand (type,
6284 convert (type, integer_zero_node),
6285 arg0);
6286 case GE_EXPR:
6287 code = EQ_EXPR;
6288 TREE_SET_CODE (t, EQ_EXPR);
6289 break;
6290 case LE_EXPR:
6291 return omit_one_operand (type,
6292 convert (type, integer_one_node),
6293 arg0);
6294 case LT_EXPR:
6295 code = NE_EXPR;
6296 TREE_SET_CODE (t, NE_EXPR);
6297 break;
6299 /* The GE_EXPR and LT_EXPR cases above are not normally
6300 reached because of previous transformations. */
6302 default:
6303 break;
6305 else if (TREE_INT_CST_HIGH (arg1) == 0
6306 && TREE_INT_CST_LOW (arg1) == max - 1)
6307 switch (code)
6309 case GT_EXPR:
6310 code = EQ_EXPR;
6311 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6312 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6313 break;
6314 case LE_EXPR:
6315 code = NE_EXPR;
6316 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
6317 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6318 break;
6319 default:
6320 break;
6322 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6323 && TREE_INT_CST_LOW (arg1) == min)
6324 switch (code)
6326 case LT_EXPR:
6327 return omit_one_operand (type,
6328 convert (type, integer_zero_node),
6329 arg0);
6330 case LE_EXPR:
6331 code = EQ_EXPR;
6332 TREE_SET_CODE (t, EQ_EXPR);
6333 break;
6335 case GE_EXPR:
6336 return omit_one_operand (type,
6337 convert (type, integer_one_node),
6338 arg0);
6339 case GT_EXPR:
6340 code = NE_EXPR;
6341 TREE_SET_CODE (t, NE_EXPR);
6342 break;
6344 default:
6345 break;
6347 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
6348 && TREE_INT_CST_LOW (arg1) == min + 1)
6349 switch (code)
6351 case GE_EXPR:
6352 code = NE_EXPR;
6353 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6354 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6355 break;
6356 case LT_EXPR:
6357 code = EQ_EXPR;
6358 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6359 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6360 break;
6361 default:
6362 break;
6365 else if (TREE_INT_CST_HIGH (arg1) == 0
6366 && TREE_INT_CST_LOW (arg1) == signed_max
6367 && TREE_UNSIGNED (TREE_TYPE (arg1))
6368 /* signed_type does not work on pointer types. */
6369 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6371 /* The following case also applies to X < signed_max+1
 6372 and X >= signed_max+1 because of previous transformations. */
6373 if (code == LE_EXPR || code == GT_EXPR)
6375 tree st0, st1;
6376 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
6377 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
6378 return fold
6379 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
6380 type, convert (st0, arg0),
6381 convert (st1, integer_zero_node)));
6387 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6388 a MINUS_EXPR of a constant, we can convert it into a comparison with
6389 a revised constant as long as no overflow occurs. */
6390 if ((code == EQ_EXPR || code == NE_EXPR)
6391 && TREE_CODE (arg1) == INTEGER_CST
6392 && (TREE_CODE (arg0) == PLUS_EXPR
6393 || TREE_CODE (arg0) == MINUS_EXPR)
6394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6395 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6396 ? MINUS_EXPR : PLUS_EXPR,
6397 arg1, TREE_OPERAND (arg0, 1), 0))
6398 && ! TREE_CONSTANT_OVERFLOW (tem))
6399 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6401 /* Similarly for a NEGATE_EXPR. */
6402 else if ((code == EQ_EXPR || code == NE_EXPR)
6403 && TREE_CODE (arg0) == NEGATE_EXPR
6404 && TREE_CODE (arg1) == INTEGER_CST
6405 && 0 != (tem = negate_expr (arg1))
6406 && TREE_CODE (tem) == INTEGER_CST
6407 && ! TREE_CONSTANT_OVERFLOW (tem))
6408 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6410 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6411 for !=. Don't do this for ordered comparisons due to overflow. */
6412 else if ((code == NE_EXPR || code == EQ_EXPR)
6413 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6414 return fold (build (code, type,
6415 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6417 /* If we are widening one operand of an integer comparison,
6418 see if the other operand is similarly being widened. Perhaps we
6419 can do the comparison in the narrower type. */
6420 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6421 && TREE_CODE (arg0) == NOP_EXPR
6422 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6423 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6424 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6425 || (TREE_CODE (t1) == INTEGER_CST
6426 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6427 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6429 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6430 constant, we can simplify it. */
6431 else if (TREE_CODE (arg1) == INTEGER_CST
6432 && (TREE_CODE (arg0) == MIN_EXPR
6433 || TREE_CODE (arg0) == MAX_EXPR)
6434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6435 return optimize_minmax_comparison (t);
6437 /* If we are comparing an ABS_EXPR with a constant, we can
6438 convert all the cases into explicit comparisons, but they may
6439 well not be faster than doing the ABS and one comparison.
6440 But ABS (X) <= C is a range comparison, which becomes a subtraction
6441 and a comparison, and is probably faster. */
6442 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6443 && TREE_CODE (arg0) == ABS_EXPR
6444 && ! TREE_SIDE_EFFECTS (arg0)
6445 && (0 != (tem = negate_expr (arg1)))
6446 && TREE_CODE (tem) == INTEGER_CST
6447 && ! TREE_CONSTANT_OVERFLOW (tem))
6448 return fold (build (TRUTH_ANDIF_EXPR, type,
6449 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6450 build (LE_EXPR, type,
6451 TREE_OPERAND (arg0, 0), arg1)));
6453 /* If this is an EQ or NE comparison with zero and ARG0 is
6454 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6455 two operations, but the latter can be done in one less insn
6456 on machines that have only two-operand insns or on which a
6457 constant cannot be the first operand. */
6458 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6459 && TREE_CODE (arg0) == BIT_AND_EXPR)
6461 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6462 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6463 return
6464 fold (build (code, type,
6465 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6466 build (RSHIFT_EXPR,
6467 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6468 TREE_OPERAND (arg0, 1),
6469 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6470 convert (TREE_TYPE (arg0),
6471 integer_one_node)),
6472 arg1));
6473 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6474 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6475 return
6476 fold (build (code, type,
6477 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6478 build (RSHIFT_EXPR,
6479 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6480 TREE_OPERAND (arg0, 0),
6481 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6482 convert (TREE_TYPE (arg0),
6483 integer_one_node)),
6484 arg1));
6487 /* If this is an NE or EQ comparison of zero against the result of a
6488 signed MOD operation whose second operand is a power of 2, make
6489 the MOD operation unsigned since it is simpler and equivalent. */
6490 if ((code == NE_EXPR || code == EQ_EXPR)
6491 && integer_zerop (arg1)
6492 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6493 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6494 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6495 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6496 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6497 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6499 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
6500 tree newmod = build (TREE_CODE (arg0), newtype,
6501 convert (newtype, TREE_OPERAND (arg0, 0)),
6502 convert (newtype, TREE_OPERAND (arg0, 1)));
6504 return build (code, type, newmod, convert (newtype, arg1));
6507 /* If this is an NE comparison of zero with an AND of one, remove the
6508 comparison since the AND will give the correct value. */
6509 if (code == NE_EXPR && integer_zerop (arg1)
6510 && TREE_CODE (arg0) == BIT_AND_EXPR
6511 && integer_onep (TREE_OPERAND (arg0, 1)))
6512 return convert (type, arg0);
6514 /* If we have (A & C) == C where C is a power of 2, convert this into
6515 (A & C) != 0. Similarly for NE_EXPR. */
6516 if ((code == EQ_EXPR || code == NE_EXPR)
6517 && TREE_CODE (arg0) == BIT_AND_EXPR
6518 && integer_pow2p (TREE_OPERAND (arg0, 1))
6519 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6520 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6521 arg0, integer_zero_node));
6523 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6524 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6525 if ((code == EQ_EXPR || code == NE_EXPR)
6526 && TREE_CODE (arg0) == BIT_AND_EXPR
6527 && integer_zerop (arg1))
6529 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
6530 TREE_OPERAND (arg0, 1));
6531 if (arg00 != NULL_TREE)
6533 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
6534 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
6535 convert (stype, arg00),
6536 convert (stype, integer_zero_node)));
6540 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6541 and similarly for >= into !=. */
6542 if ((code == LT_EXPR || code == GE_EXPR)
6543 && TREE_UNSIGNED (TREE_TYPE (arg0))
6544 && TREE_CODE (arg1) == LSHIFT_EXPR
6545 && integer_onep (TREE_OPERAND (arg1, 0)))
6546 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6547 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6548 TREE_OPERAND (arg1, 1)),
6549 convert (TREE_TYPE (arg0), integer_zero_node));
6551 else if ((code == LT_EXPR || code == GE_EXPR)
6552 && TREE_UNSIGNED (TREE_TYPE (arg0))
6553 && (TREE_CODE (arg1) == NOP_EXPR
6554 || TREE_CODE (arg1) == CONVERT_EXPR)
6555 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6556 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6557 return
6558 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6559 convert (TREE_TYPE (arg0),
6560 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6561 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6562 convert (TREE_TYPE (arg0), integer_zero_node));
6564 /* Simplify comparison of something with itself. (For IEEE
6565 floating-point, we can only do some of these simplifications.) */
6566 if (operand_equal_p (arg0, arg1, 0))
6568 switch (code)
6570 case EQ_EXPR:
6571 case GE_EXPR:
6572 case LE_EXPR:
6573 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6574 return constant_boolean_node (1, type);
6575 code = EQ_EXPR;
6576 TREE_SET_CODE (t, code);
6577 break;
6579 case NE_EXPR:
6580 /* For NE, we can only do this simplification if integer. */
6581 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6582 break;
6583 /* ... fall through ... */
6584 case GT_EXPR:
6585 case LT_EXPR:
6586 return constant_boolean_node (0, type);
6587 default:
6588 abort ();
6592 /* If we are comparing an expression that just has comparisons
6593 of two integer values, arithmetic expressions of those comparisons,
6594 and constants, we can simplify it. There are only three cases
6595 to check: the two values can either be equal, the first can be
6596 greater, or the second can be greater. Fold the expression for
6597 those three values. Since each value must be 0 or 1, we have
6598 eight possibilities, each of which corresponds to the constant 0
6599 or 1 or one of the six possible comparisons.
6601 This handles common cases like (a > b) == 0 but also handles
6602 expressions like ((x > y) - (y > x)) > 0, which supposedly
6603 occur in macroized code. */
6605 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6607 tree cval1 = 0, cval2 = 0;
6608 int save_p = 0;
6610 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6611 /* Don't handle degenerate cases here; they should already
6612 have been handled anyway. */
6613 && cval1 != 0 && cval2 != 0
6614 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6615 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6616 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6617 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6618 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6619 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6620 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6622 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6623 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6625 /* We can't just pass T to eval_subst in case cval1 or cval2
6626 was the same as ARG1. */
6628 tree high_result
6629 = fold (build (code, type,
6630 eval_subst (arg0, cval1, maxval, cval2, minval),
6631 arg1));
6632 tree equal_result
6633 = fold (build (code, type,
6634 eval_subst (arg0, cval1, maxval, cval2, maxval),
6635 arg1));
6636 tree low_result
6637 = fold (build (code, type,
6638 eval_subst (arg0, cval1, minval, cval2, maxval),
6639 arg1));
6641 /* All three of these results should be 0 or 1. Confirm they
6642 are. Then use those values to select the proper code
6643 to use. */
6645 if ((integer_zerop (high_result)
6646 || integer_onep (high_result))
6647 && (integer_zerop (equal_result)
6648 || integer_onep (equal_result))
6649 && (integer_zerop (low_result)
6650 || integer_onep (low_result)))
6652 /* Make a 3-bit mask with the high-order bit being the
6653 value for `>', the next for '=', and the low for '<'. */
6654 switch ((integer_onep (high_result) * 4)
6655 + (integer_onep (equal_result) * 2)
6656 + integer_onep (low_result))
6658 case 0:
6659 /* Always false. */
6660 return omit_one_operand (type, integer_zero_node, arg0);
6661 case 1:
6662 code = LT_EXPR;
6663 break;
6664 case 2:
6665 code = EQ_EXPR;
6666 break;
6667 case 3:
6668 code = LE_EXPR;
6669 break;
6670 case 4:
6671 code = GT_EXPR;
6672 break;
6673 case 5:
6674 code = NE_EXPR;
6675 break;
6676 case 6:
6677 code = GE_EXPR;
6678 break;
6679 case 7:
6680 /* Always true. */
6681 return omit_one_operand (type, integer_one_node, arg0);
6684 t = build (code, type, cval1, cval2);
6685 if (save_p)
6686 return save_expr (t);
6687 else
6688 return fold (t);
6693 /* If this is a comparison of a field, we may be able to simplify it. */
6694 if (((TREE_CODE (arg0) == COMPONENT_REF
6695 && (*lang_hooks.can_use_bit_fields_p) ())
6696 || TREE_CODE (arg0) == BIT_FIELD_REF)
6697 && (code == EQ_EXPR || code == NE_EXPR)
6698 /* Handle the constant case even without -O
6699 to make sure the warnings are given. */
6700 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6702 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6703 return t1 ? t1 : t;
6706 /* If this is a comparison of complex values and either or both sides
6707 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6708 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6709 This may prevent needless evaluations. */
6710 if ((code == EQ_EXPR || code == NE_EXPR)
6711 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6712 && (TREE_CODE (arg0) == COMPLEX_EXPR
6713 || TREE_CODE (arg1) == COMPLEX_EXPR
6714 || TREE_CODE (arg0) == COMPLEX_CST
6715 || TREE_CODE (arg1) == COMPLEX_CST))
6717 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6718 tree real0, imag0, real1, imag1;
6720 arg0 = save_expr (arg0);
6721 arg1 = save_expr (arg1);
6722 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6723 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6724 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6725 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6727 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6728 : TRUTH_ORIF_EXPR),
6729 type,
6730 fold (build (code, type, real0, real1)),
6731 fold (build (code, type, imag0, imag1))));
6734 /* Optimize comparisons of strlen vs zero to a compare of the
6735 first character of the string vs zero. To wit,
6736 strlen(ptr) == 0 => *ptr == 0
6737 strlen(ptr) != 0 => *ptr != 0
6738 Other cases should reduce to one of these two (or a constant)
6739 due to the return value of strlen being unsigned. */
6740 if ((code == EQ_EXPR || code == NE_EXPR)
6741 && integer_zerop (arg1)
6742 && TREE_CODE (arg0) == CALL_EXPR
6743 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6745 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6746 tree arglist;
6748 if (TREE_CODE (fndecl) == FUNCTION_DECL
6749 && DECL_BUILT_IN (fndecl)
6750 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6751 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6752 && (arglist = TREE_OPERAND (arg0, 1))
6753 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6754 && ! TREE_CHAIN (arglist))
6755 return fold (build (code, type,
6756 build1 (INDIRECT_REF, char_type_node,
6757 TREE_VALUE(arglist)),
6758 integer_zero_node));
6761 /* From here on, the only cases we handle are when the result is
6762 known to be a constant.
6764 To compute GT, swap the arguments and do LT.
6765 To compute GE, do LT and invert the result.
6766 To compute LE, swap the arguments, do LT and invert the result.
6767 To compute NE, do EQ and invert the result.
6769 Therefore, the code below must handle only EQ and LT. */
6771 if (code == LE_EXPR || code == GT_EXPR)
6773 tem = arg0, arg0 = arg1, arg1 = tem;
6774 code = swap_tree_comparison (code);
6777 /* Note that it is safe to invert for real values here because we
6778 will check below in the one case that it matters. */
6780 t1 = NULL_TREE;
6781 invert = 0;
6782 if (code == NE_EXPR || code == GE_EXPR)
6784 invert = 1;
6785 code = invert_tree_comparison (code);
6788 /* Compute a result for LT or EQ if args permit;
6789 otherwise return T. */
6790 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6792 if (code == EQ_EXPR)
6793 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6794 else
6795 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6796 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6797 : INT_CST_LT (arg0, arg1)),
6801 #if 0 /* This is no longer useful, but breaks some real code. */
6802 /* Assume a nonexplicit constant cannot equal an explicit one,
6803 since such code would be undefined anyway.
6804 Exception: on sysvr4, using #pragma weak,
6805 a label can come out as 0. */
6806 else if (TREE_CODE (arg1) == INTEGER_CST
6807 && !integer_zerop (arg1)
6808 && TREE_CONSTANT (arg0)
6809 && TREE_CODE (arg0) == ADDR_EXPR
6810 && code == EQ_EXPR)
6811 t1 = build_int_2 (0, 0);
6812 #endif
6813 /* Two real constants can be compared explicitly. */
6814 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6816 /* If either operand is a NaN, the result is false with two
6817 exceptions: First, an NE_EXPR is true on NaNs, but that case
6818 is already handled correctly since we will be inverting the
6819 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6820 or a GE_EXPR into a LT_EXPR, we must return true so that it
6821 will be inverted into false. */
6823 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6824 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6825 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6827 else if (code == EQ_EXPR)
6828 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6829 TREE_REAL_CST (arg1)),
6831 else
6832 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6833 TREE_REAL_CST (arg1)),
6837 if (t1 == NULL_TREE)
6838 return t;
6840 if (invert)
6841 TREE_INT_CST_LOW (t1) ^= 1;
6843 TREE_TYPE (t1) = type;
6844 if (TREE_CODE (type) == BOOLEAN_TYPE)
6845 return (*lang_hooks.truthvalue_conversion) (t1);
6846 return t1;
6848 case COND_EXPR:
6849 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6850 so all simple results must be passed through pedantic_non_lvalue. */
6851 if (TREE_CODE (arg0) == INTEGER_CST)
6852 return pedantic_non_lvalue
6853 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6854 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6855 return pedantic_omit_one_operand (type, arg1, arg0);
6857 /* If the second operand is zero, invert the comparison and swap
6858 the second and third operands. Likewise if the second operand
6859 is constant and the third is not or if the third operand is
6860 equivalent to the first operand of the comparison. */
6862 if (integer_zerop (arg1)
6863 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6864 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6865 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6866 TREE_OPERAND (t, 2),
6867 TREE_OPERAND (arg0, 1))))
6869 /* See if this can be inverted. If it can't, possibly because
6870 it was a floating-point inequality comparison, don't do
6871 anything. */
6872 tem = invert_truthvalue (arg0);
6874 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6876 t = build (code, type, tem,
6877 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6878 arg0 = tem;
6879 /* arg1 should be the first argument of the new T. */
6880 arg1 = TREE_OPERAND (t, 1);
6881 STRIP_NOPS (arg1);
6885 /* If we have A op B ? A : C, we may be able to convert this to a
6886 simpler expression, depending on the operation and the values
6887 of B and C. Signed zeros prevent all of these transformations,
6888 for reasons given above each one. */
6890 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6891 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6892 arg1, TREE_OPERAND (arg0, 1))
6893 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
6895 tree arg2 = TREE_OPERAND (t, 2);
6896 enum tree_code comp_code = TREE_CODE (arg0);
6898 STRIP_NOPS (arg2);
6900 /* If we have A op 0 ? A : -A, consider applying the following
6901 transformations:
6903 A == 0? A : -A same as -A
6904 A != 0? A : -A same as A
6905 A >= 0? A : -A same as abs (A)
6906 A > 0? A : -A same as abs (A)
6907 A <= 0? A : -A same as -abs (A)
6908 A < 0? A : -A same as -abs (A)
6910 None of these transformations work for modes with signed
6911 zeros. If A is +/-0, the first two transformations will
6912 change the sign of the result (from +0 to -0, or vice
6913 versa). The last four will fix the sign of the result,
6914 even though the original expressions could be positive or
6915 negative, depending on the sign of A.
6917 Note that all these transformations are correct if A is
6918 NaN, since the two alternatives (A and -A) are also NaNs. */
6919 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6920 ? real_zerop (TREE_OPERAND (arg0, 1))
6921 : integer_zerop (TREE_OPERAND (arg0, 1)))
6922 && TREE_CODE (arg2) == NEGATE_EXPR
6923 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6924 switch (comp_code)
6926 case EQ_EXPR:
6927 return
6928 pedantic_non_lvalue
6929 (convert (type,
6930 negate_expr
6931 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6932 arg1))));
6933 case NE_EXPR:
6934 return pedantic_non_lvalue (convert (type, arg1));
6935 case GE_EXPR:
6936 case GT_EXPR:
6937 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6938 arg1 = convert ((*lang_hooks.types.signed_type)
6939 (TREE_TYPE (arg1)), arg1);
6940 return pedantic_non_lvalue
6941 (convert (type, fold (build1 (ABS_EXPR,
6942 TREE_TYPE (arg1), arg1))));
6943 case LE_EXPR:
6944 case LT_EXPR:
6945 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6946 arg1 = convert ((lang_hooks.types.signed_type)
6947 (TREE_TYPE (arg1)), arg1);
6948 return pedantic_non_lvalue
6949 (negate_expr (convert (type,
6950 fold (build1 (ABS_EXPR,
6951 TREE_TYPE (arg1),
6952 arg1)))));
6953 default:
6954 abort ();
6957 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6958 A == 0 ? A : 0 is always 0 unless A is -0. Note that
6959 both transformations are correct when A is NaN: A != 0
6960 is then true, and A == 0 is false. */
6962 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6964 if (comp_code == NE_EXPR)
6965 return pedantic_non_lvalue (convert (type, arg1));
6966 else if (comp_code == EQ_EXPR)
6967 return pedantic_non_lvalue (convert (type, integer_zero_node));
6970 /* Try some transformations of A op B ? A : B.
6972 A == B? A : B same as B
6973 A != B? A : B same as A
6974 A >= B? A : B same as max (A, B)
6975 A > B? A : B same as max (B, A)
6976 A <= B? A : B same as min (A, B)
6977 A < B? A : B same as min (B, A)
6979 As above, these transformations don't work in the presence
6980 of signed zeros. For example, if A and B are zeros of
6981 opposite sign, the first two transformations will change
6982 the sign of the result. In the last four, the original
6983 expressions give different results for (A=+0, B=-0) and
6984 (A=-0, B=+0), but the transformed expressions do not.
6986 The first two transformations are correct if either A or B
6987 is a NaN. In the first transformation, the condition will
6988 be false, and B will indeed be chosen. In the case of the
6989 second transformation, the condition A != B will be true,
6990 and A will be chosen.
6992 The conversions to max() and min() are not correct if B is
6993 a number and A is not. The conditions in the original
6994 expressions will be false, so all four give B. The min()
6995 and max() versions would give a NaN instead. */
6996 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6997 arg2, TREE_OPERAND (arg0, 0)))
6999 tree comp_op0 = TREE_OPERAND (arg0, 0);
7000 tree comp_op1 = TREE_OPERAND (arg0, 1);
7001 tree comp_type = TREE_TYPE (comp_op0);
7003 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7004 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7005 comp_type = type;
7007 switch (comp_code)
7009 case EQ_EXPR:
7010 return pedantic_non_lvalue (convert (type, arg2));
7011 case NE_EXPR:
7012 return pedantic_non_lvalue (convert (type, arg1));
7013 case LE_EXPR:
7014 case LT_EXPR:
7015 /* In C++ a ?: expression can be an lvalue, so put the
7016 operand which will be used if they are equal first
7017 so that we can convert this back to the
7018 corresponding COND_EXPR. */
7019 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7020 return pedantic_non_lvalue
7021 (convert (type, fold (build (MIN_EXPR, comp_type,
7022 (comp_code == LE_EXPR
7023 ? comp_op0 : comp_op1),
7024 (comp_code == LE_EXPR
7025 ? comp_op1 : comp_op0)))));
7026 break;
7027 case GE_EXPR:
7028 case GT_EXPR:
7029 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7030 return pedantic_non_lvalue
7031 (convert (type, fold (build (MAX_EXPR, comp_type,
7032 (comp_code == GE_EXPR
7033 ? comp_op0 : comp_op1),
7034 (comp_code == GE_EXPR
7035 ? comp_op1 : comp_op0)))));
7036 break;
7037 default:
7038 abort ();
7042 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7043 we might still be able to simplify this. For example,
7044 if C1 is one less or one more than C2, this might have started
7045 out as a MIN or MAX and been transformed by this function.
7046 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7048 if (INTEGRAL_TYPE_P (type)
7049 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7050 && TREE_CODE (arg2) == INTEGER_CST)
7051 switch (comp_code)
7053 case EQ_EXPR:
7054 /* We can replace A with C1 in this case. */
7055 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7056 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7057 TREE_OPERAND (t, 2));
7058 break;
7060 case LT_EXPR:
7061 /* If C1 is C2 + 1, this is min(A, C2). */
7062 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7063 && operand_equal_p (TREE_OPERAND (arg0, 1),
7064 const_binop (PLUS_EXPR, arg2,
7065 integer_one_node, 0), 1))
7066 return pedantic_non_lvalue
7067 (fold (build (MIN_EXPR, type, arg1, arg2)));
7068 break;
7070 case LE_EXPR:
7071 /* If C1 is C2 - 1, this is min(A, C2). */
7072 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7073 && operand_equal_p (TREE_OPERAND (arg0, 1),
7074 const_binop (MINUS_EXPR, arg2,
7075 integer_one_node, 0), 1))
7076 return pedantic_non_lvalue
7077 (fold (build (MIN_EXPR, type, arg1, arg2)));
7078 break;
7080 case GT_EXPR:
7081 /* If C1 is C2 - 1, this is max(A, C2). */
7082 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
7083 && operand_equal_p (TREE_OPERAND (arg0, 1),
7084 const_binop (MINUS_EXPR, arg2,
7085 integer_one_node, 0), 1))
7086 return pedantic_non_lvalue
7087 (fold (build (MAX_EXPR, type, arg1, arg2)));
7088 break;
7090 case GE_EXPR:
7091 /* If C1 is C2 + 1, this is max(A, C2). */
7092 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7093 && operand_equal_p (TREE_OPERAND (arg0, 1),
7094 const_binop (PLUS_EXPR, arg2,
7095 integer_one_node, 0), 1))
7096 return pedantic_non_lvalue
7097 (fold (build (MAX_EXPR, type, arg1, arg2)));
7098 break;
7099 case NE_EXPR:
7100 break;
7101 default:
7102 abort ();
7106 /* If the second operand is simpler than the third, swap them
7107 since that produces better jump optimization results. */
7108 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
7109 || TREE_CODE (arg1) == SAVE_EXPR)
7110 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
7111 || DECL_P (TREE_OPERAND (t, 2))
7112 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
7114 /* See if this can be inverted. If it can't, possibly because
7115 it was a floating-point inequality comparison, don't do
7116 anything. */
7117 tem = invert_truthvalue (arg0);
7119 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7121 t = build (code, type, tem,
7122 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7123 arg0 = tem;
7124 /* arg1 should be the first argument of the new T. */
7125 arg1 = TREE_OPERAND (t, 1);
7126 STRIP_NOPS (arg1);
7130 /* Convert A ? 1 : 0 to simply A. */
7131 if (integer_onep (TREE_OPERAND (t, 1))
7132 && integer_zerop (TREE_OPERAND (t, 2))
7133 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
7134 call to fold will try to move the conversion inside
7135 a COND, which will recurse. In that case, the COND_EXPR
7136 is probably the best choice, so leave it alone. */
7137 && type == TREE_TYPE (arg0))
7138 return pedantic_non_lvalue (arg0);
7140 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
7141 over COND_EXPR in cases such as floating point comparisons. */
7142 if (integer_zerop (TREE_OPERAND (t, 1))
7143 && integer_onep (TREE_OPERAND (t, 2))
7144 && truth_value_p (TREE_CODE (arg0)))
7145 return pedantic_non_lvalue (convert (type,
7146 invert_truthvalue (arg0)));
7148 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7149 operation is simply A & 2. */
7151 if (integer_zerop (TREE_OPERAND (t, 2))
7152 && TREE_CODE (arg0) == NE_EXPR
7153 && integer_zerop (TREE_OPERAND (arg0, 1))
7154 && integer_pow2p (arg1)
7155 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7156 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7157 arg1, 1))
7158 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7160 /* Convert A ? B : 0 into A && B if A and B are truth values. */
7161 if (integer_zerop (TREE_OPERAND (t, 2))
7162 && truth_value_p (TREE_CODE (arg0))
7163 && truth_value_p (TREE_CODE (arg1)))
7164 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
7165 arg0, arg1)));
7167 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
7168 if (integer_onep (TREE_OPERAND (t, 2))
7169 && truth_value_p (TREE_CODE (arg0))
7170 && truth_value_p (TREE_CODE (arg1)))
7172 /* Only perform transformation if ARG0 is easily inverted. */
7173 tem = invert_truthvalue (arg0);
7174 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7175 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
7176 tem, arg1)));
7179 return t;
7181 case COMPOUND_EXPR:
7182 /* When pedantic, a compound expression can be neither an lvalue
7183 nor an integer constant expression. */
7184 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7185 return t;
7186 /* Don't let (0, 0) be null pointer constant. */
7187 if (integer_zerop (arg1))
7188 return build1 (NOP_EXPR, type, arg1);
7189 return convert (type, arg1);
7191 case COMPLEX_EXPR:
7192 if (wins)
7193 return build_complex (type, arg0, arg1);
7194 return t;
7196 case REALPART_EXPR:
7197 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7198 return t;
7199 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7200 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7201 TREE_OPERAND (arg0, 1));
7202 else if (TREE_CODE (arg0) == COMPLEX_CST)
7203 return TREE_REALPART (arg0);
7204 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7205 return fold (build (TREE_CODE (arg0), type,
7206 fold (build1 (REALPART_EXPR, type,
7207 TREE_OPERAND (arg0, 0))),
7208 fold (build1 (REALPART_EXPR,
7209 type, TREE_OPERAND (arg0, 1)))));
7210 return t;
7212 case IMAGPART_EXPR:
7213 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7214 return convert (type, integer_zero_node);
7215 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7216 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7217 TREE_OPERAND (arg0, 0));
7218 else if (TREE_CODE (arg0) == COMPLEX_CST)
7219 return TREE_IMAGPART (arg0);
7220 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7221 return fold (build (TREE_CODE (arg0), type,
7222 fold (build1 (IMAGPART_EXPR, type,
7223 TREE_OPERAND (arg0, 0))),
7224 fold (build1 (IMAGPART_EXPR, type,
7225 TREE_OPERAND (arg0, 1)))));
7226 return t;
7228 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7229 appropriate. */
7230 case CLEANUP_POINT_EXPR:
7231 if (! has_cleanups (arg0))
7232 return TREE_OPERAND (t, 0);
7235 enum tree_code code0 = TREE_CODE (arg0);
7236 int kind0 = TREE_CODE_CLASS (code0);
7237 tree arg00 = TREE_OPERAND (arg0, 0);
7238 tree arg01;
7240 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7241 return fold (build1 (code0, type,
7242 fold (build1 (CLEANUP_POINT_EXPR,
7243 TREE_TYPE (arg00), arg00))));
7245 if (kind0 == '<' || kind0 == '2'
7246 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7247 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7248 || code0 == TRUTH_XOR_EXPR)
7250 arg01 = TREE_OPERAND (arg0, 1);
7252 if (TREE_CONSTANT (arg00)
7253 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7254 && ! has_cleanups (arg00)))
7255 return fold (build (code0, type, arg00,
7256 fold (build1 (CLEANUP_POINT_EXPR,
7257 TREE_TYPE (arg01), arg01))));
7259 if (TREE_CONSTANT (arg01))
7260 return fold (build (code0, type,
7261 fold (build1 (CLEANUP_POINT_EXPR,
7262 TREE_TYPE (arg00), arg00)),
7263 arg01));
7266 return t;
7269 case CALL_EXPR:
7270 /* Check for a built-in function. */
7271 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
7272 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
7273 == FUNCTION_DECL)
7274 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
7276 tree tmp = fold_builtin (expr);
7277 if (tmp)
7278 return tmp;
7280 return t;
7282 default:
7283 return t;
7284 } /* switch (code) */
7287 /* Determine if first argument is a multiple of second argument. Return 0 if
7288 it is not, or we cannot easily determined it to be.
7290 An example of the sort of thing we care about (at this point; this routine
7291 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7292 fold cases do now) is discovering that
7294 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7296 is a multiple of
7298 SAVE_EXPR (J * 8)
7300 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7302 This code also handles discovering that
7304 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7306 is a multiple of 8 so we don't have to worry about dealing with a
7307 possible remainder.
7309 Note that we *look* inside a SAVE_EXPR only to determine how it was
7310 calculated; it is not safe for fold to do much of anything else with the
7311 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7312 at run time. For example, the latter example above *cannot* be implemented
7313 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7314 evaluation time of the original SAVE_EXPR is not necessarily the same at
7315 the time the new expression is evaluated. The only optimization of this
7316 sort that would be valid is changing
7318 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7320 divided by 8 to
7322 SAVE_EXPR (I) * SAVE_EXPR (J)
7324 (where the same SAVE_EXPR (J) is used in the original and the
7325 transformed version). */
7327 static int
7328 multiple_of_p (type, top, bottom)
7329 tree type;
7330 tree top;
7331 tree bottom;
7333 if (operand_equal_p (top, bottom, 0))
7334 return 1;
7336 if (TREE_CODE (type) != INTEGER_TYPE)
7337 return 0;
7339 switch (TREE_CODE (top))
7341 case MULT_EXPR:
7342 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7343 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7345 case PLUS_EXPR:
7346 case MINUS_EXPR:
7347 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7348 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7350 case LSHIFT_EXPR:
7351 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
7353 tree op1, t1;
7355 op1 = TREE_OPERAND (top, 1);
7356 /* const_binop may not detect overflow correctly,
7357 so check for it explicitly here. */
7358 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
7359 > TREE_INT_CST_LOW (op1)
7360 && TREE_INT_CST_HIGH (op1) == 0
7361 && 0 != (t1 = convert (type,
7362 const_binop (LSHIFT_EXPR, size_one_node,
7363 op1, 0)))
7364 && ! TREE_OVERFLOW (t1))
7365 return multiple_of_p (type, t1, bottom);
7367 return 0;
7369 case NOP_EXPR:
7370 /* Can't handle conversions from non-integral or wider integral type. */
7371 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7372 || (TYPE_PRECISION (type)
7373 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7374 return 0;
7376 /* .. fall through ... */
7378 case SAVE_EXPR:
7379 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7381 case INTEGER_CST:
7382 if (TREE_CODE (bottom) != INTEGER_CST
7383 || (TREE_UNSIGNED (type)
7384 && (tree_int_cst_sgn (top) < 0
7385 || tree_int_cst_sgn (bottom) < 0)))
7386 return 0;
7387 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
7388 top, bottom, 0));
7390 default:
7391 return 0;
7395 /* Return true if `t' is known to be non-negative. */
7398 tree_expr_nonnegative_p (t)
7399 tree t;
7401 switch (TREE_CODE (t))
7403 case ABS_EXPR:
7404 case FFS_EXPR:
7405 return 1;
7406 case INTEGER_CST:
7407 return tree_int_cst_sgn (t) >= 0;
7408 case TRUNC_DIV_EXPR:
7409 case CEIL_DIV_EXPR:
7410 case FLOOR_DIV_EXPR:
7411 case ROUND_DIV_EXPR:
7412 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7413 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7414 case TRUNC_MOD_EXPR:
7415 case CEIL_MOD_EXPR:
7416 case FLOOR_MOD_EXPR:
7417 case ROUND_MOD_EXPR:
7418 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7419 case COND_EXPR:
7420 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
7421 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
7422 case COMPOUND_EXPR:
7423 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7424 case MIN_EXPR:
7425 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7426 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7427 case MAX_EXPR:
7428 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
7429 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7430 case MODIFY_EXPR:
7431 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7432 case BIND_EXPR:
7433 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
7434 case SAVE_EXPR:
7435 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7436 case NON_LVALUE_EXPR:
7437 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
7438 case RTL_EXPR:
7439 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
7441 default:
7442 if (truth_value_p (TREE_CODE (t)))
7443 /* Truth values evaluate to 0 or 1, which is nonnegative. */
7444 return 1;
7445 else
7446 /* We don't know sign of `t', so be conservative and return false. */
7447 return 0;
7451 /* Return true if `r' is known to be non-negative.
7452 Only handles constants at the moment. */
7455 rtl_expr_nonnegative_p (r)
7456 rtx r;
7458 switch (GET_CODE (r))
7460 case CONST_INT:
7461 return INTVAL (r) >= 0;
7463 case CONST_DOUBLE:
7464 if (GET_MODE (r) == VOIDmode)
7465 return CONST_DOUBLE_HIGH (r) >= 0;
7466 return 0;
7468 case CONST_VECTOR:
7470 int units, i;
7471 rtx elt;
7473 units = CONST_VECTOR_NUNITS (r);
7475 for (i = 0; i < units; ++i)
7477 elt = CONST_VECTOR_ELT (r, i);
7478 if (!rtl_expr_nonnegative_p (elt))
7479 return 0;
7482 return 1;
7485 case SYMBOL_REF:
7486 case LABEL_REF:
7487 /* These are always nonnegative. */
7488 return 1;
7490 default:
7491 return 0;
7495 #include "gt-fold-const.h"