1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 #include "flags.h"
50 #include "tree.h"
51 #include "real.h"
52 #include "rtl.h"
53 #include "expr.h"
54 #include "tm_p.h"
55 #include "toplev.h"
56 #include "ggc.h"
57 #include "hashtab.h"
58 #include "langhooks.h"
59 #include "md5.h"
61 /* The following constants represent a bit-based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
65 COMPCODE_FALSE = 0,
66 COMPCODE_LT = 1,
67 COMPCODE_EQ = 2,
68 COMPCODE_LE = 3,
69 COMPCODE_GT = 4,
70 COMPCODE_LTGT = 5,
71 COMPCODE_GE = 6,
72 COMPCODE_ORD = 7,
73 COMPCODE_UNORD = 8,
74 COMPCODE_UNLT = 9,
75 COMPCODE_UNEQ = 10,
76 COMPCODE_UNLE = 11,
77 COMPCODE_UNGT = 12,
78 COMPCODE_NE = 13,
79 COMPCODE_UNGE = 14,
80 COMPCODE_TRUE = 15
81 };
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 tree);
118 static tree fold_range_test (tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree constant_boolean_node (int, tree);
126 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
127 tree, int);
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_relational_hi_lo (enum tree_code *, const tree,
138 tree *, tree *);
140 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
141 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
142 and SUM1. Then this yields nonzero if overflow occurred during the
143 addition.
145 Overflow occurs if A and B have the same sign, but A and SUM differ in
146 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
147 sign. */
148 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
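/* Editorial worked example (not part of the original file; uses an
   8-bit word for readability).  Adding a = 100 and b = 100 wraps to
   sum = -56:

     a = 0x64, b = 0x64, sum = 0xc8
     ~(a ^ b)    = 0xff   (a and b have the same sign)
     (a ^ sum)   = 0xac   (a and sum differ in sign)
     0xff & 0xac = 0xac   -> sign bit set -> < 0 -> overflow reported.  */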
150 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
151 We do that by representing the two-word integer in 4 words, with only
152 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
153 number. The value of the word is LOWPART + HIGHPART * BASE. */
155 #define LOWPART(x) \
156 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
157 #define HIGHPART(x) \
158 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
159 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
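/* Editorial example (illustrative, assuming a 64-bit HOST_WIDE_INT,
   so BASE == 2^32): for x = 0x123456789abcdef0,
     LOWPART (x)  == 0x9abcdef0
     HIGHPART (x) == 0x12345678
   and LOWPART (x) + HIGHPART (x) * BASE reconstructs x.  */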
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs. */
165 static void
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
178 static void
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
180 HOST_WIDE_INT *hi)
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
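#if 0
/* Hedged usage sketch (editorial, not compiled; assumes a 64-bit
   HOST_WIDE_INT so BASE == 2^32): decode undoes encode exactly.  */
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  encode (words, 0x9abcdef012345678, 0x11223344);
  /* words == { 0x12345678, 0x9abcdef0, 0x11223344, 0x0 }.  */
  decode (words, &low, &hi);
  /* low == 0x9abcdef012345678 and hi == 0x11223344 again.  */
}
#endif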
186 /* Make the integer constant T valid for its type by setting to 0 or 1 all
187 the bits in the constant that don't belong in the type.
189 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
190 nonzero, a signed overflow has already occurred in calculating T, so
191 propagate it. */
193 int
194 force_fit_type (tree t, int overflow)
196 unsigned HOST_WIDE_INT low;
197 HOST_WIDE_INT high;
198 unsigned int prec;
200 if (TREE_CODE (t) == REAL_CST)
202 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
203 Consider doing it via real_convert now. */
204 return overflow;
207 else if (TREE_CODE (t) != INTEGER_CST)
208 return overflow;
210 low = TREE_INT_CST_LOW (t);
211 high = TREE_INT_CST_HIGH (t);
213 if (POINTER_TYPE_P (TREE_TYPE (t))
214 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
215 prec = POINTER_SIZE;
216 else
217 prec = TYPE_PRECISION (TREE_TYPE (t));
219 /* First clear all bits that are beyond the type's precision. */
221 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
223 else if (prec > HOST_BITS_PER_WIDE_INT)
224 TREE_INT_CST_HIGH (t)
225 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 else
228 TREE_INT_CST_HIGH (t) = 0;
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
233 /* Unsigned types do not suffer sign extension or overflow unless they
234 are a sizetype. */
235 if (TYPE_UNSIGNED (TREE_TYPE (t))
236 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
237 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
238 return overflow;
240 /* If the value's sign bit is set, extend the sign. */
241 if (prec != 2 * HOST_BITS_PER_WIDE_INT
242 && (prec > HOST_BITS_PER_WIDE_INT
243 ? 0 != (TREE_INT_CST_HIGH (t)
244 & ((HOST_WIDE_INT) 1
245 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
246 : 0 != (TREE_INT_CST_LOW (t)
247 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
249 /* Value is negative:
250 set to 1 all the bits that are outside this type's precision. */
251 if (prec > HOST_BITS_PER_WIDE_INT)
252 TREE_INT_CST_HIGH (t)
253 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
254 else
256 TREE_INT_CST_HIGH (t) = -1;
257 if (prec < HOST_BITS_PER_WIDE_INT)
258 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
262 /* Return nonzero if signed overflow occurred. */
263 return
264 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
265 != 0);
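/* Editorial worked example (illustrative): for a signed 8-bit type, a
   constant with low bits 0x1ff is first truncated to 0xff; bit 7 is
   then the sign bit, so the value sign-extends to -1 (low becomes
   all ones, high becomes -1).  Because the stored words changed, the
   final comparison above reports a signed overflow.  */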
268 /* Add two doubleword integers with doubleword result.
269 Each argument is given as two `HOST_WIDE_INT' pieces.
270 One argument is L1 and H1; the other, L2 and H2.
271 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
273 int
274 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
275 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
276 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
278 unsigned HOST_WIDE_INT l;
279 HOST_WIDE_INT h;
281 l = l1 + l2;
282 h = h1 + h2 + (l < l1);
284 *lv = l;
285 *hv = h;
286 return OVERFLOW_SUM_SIGN (h1, h2, h);
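/* Editorial example (illustrative, 64-bit HOST_WIDE_INT): adding
   (l1 = ~0, h1 = 0), i.e. 2^64 - 1, to (l2 = 1, h2 = 0) gives l = 0
   with a carry into the high word, so the result is (0, 1) == 2^64.
   Both inputs and the sum are nonnegative as double-words, so
   OVERFLOW_SUM_SIGN reports no signed overflow.  */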
289 /* Negate a doubleword integer with doubleword result.
290 Return nonzero if the operation overflows, assuming it's signed.
291 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
294 int
295 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
298 if (l1 == 0)
300 *lv = 0;
301 *hv = - h1;
302 return (*hv & h1) < 0;
304 else
306 *lv = -l1;
307 *hv = ~h1;
308 return 0;
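/* Editorial example (illustrative): negating the most negative
   double-word value (l1 == 0, h1 == sign bit only) takes the l1 == 0
   branch; *hv = -h1 wraps back to h1 itself, so (*hv & h1) < 0 flags
   the overflow -- the double-word analogue of -INT_MIN overflowing.  */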
312 /* Multiply two doubleword integers with doubleword result.
313 Return nonzero if the operation overflows, assuming it's signed.
314 Each argument is given as two `HOST_WIDE_INT' pieces.
315 One argument is L1 and H1; the other, L2 and H2.
316 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
318 int
319 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
320 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
323 HOST_WIDE_INT arg1[4];
324 HOST_WIDE_INT arg2[4];
325 HOST_WIDE_INT prod[4 * 2];
326 unsigned HOST_WIDE_INT carry;
327 int i, j, k;
328 unsigned HOST_WIDE_INT toplow, neglow;
329 HOST_WIDE_INT tophigh, neghigh;
331 encode (arg1, l1, h1);
332 encode (arg2, l2, h2);
334 memset (prod, 0, sizeof prod);
336 for (i = 0; i < 4; i++)
338 carry = 0;
339 for (j = 0; j < 4; j++)
341 k = i + j;
342 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
343 carry += arg1[i] * arg2[j];
344 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
345 carry += prod[k];
346 prod[k] = LOWPART (carry);
347 carry = HIGHPART (carry);
349 prod[i + 4] = carry;
352 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
354 /* Check for overflow by calculating the top half of the answer in full;
355 it should agree with the low half's sign bit. */
356 decode (prod + 4, &toplow, &tophigh);
357 if (h1 < 0)
359 neg_double (l2, h2, &neglow, &neghigh);
360 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 if (h2 < 0)
364 neg_double (l1, h1, &neglow, &neghigh);
365 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
367 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
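/* Editorial note (illustrative): the loop above is schoolbook long
   multiplication in base 2^(HOST_BITS_PER_WIDE_INT / 2); each digit
   product plus carries fits in one unsigned HOST_WIDE_INT, which is
   why no wider type is needed.  The overflow test relies on the fact
   that for a non-overflowing signed product, the true high half
   (prod[4..7]) must equal the sign-extension of the returned low
   half: all ones when *hv < 0, all zeros otherwise.  */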
370 /* Shift the doubleword integer in L1, H1 left by COUNT places
371 keeping only PREC bits of result.
372 Shift right if COUNT is negative.
373 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
374 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
376 void
377 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
378 HOST_WIDE_INT count, unsigned int prec,
379 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
381 unsigned HOST_WIDE_INT signmask;
383 if (count < 0)
385 rshift_double (l1, h1, -count, prec, lv, hv, arith);
386 return;
389 if (SHIFT_COUNT_TRUNCATED)
390 count %= prec;
392 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
394 /* Shifting by the host word size is undefined according to the
395 ANSI standard, so we must handle this as a special case. */
396 *hv = 0;
397 *lv = 0;
399 else if (count >= HOST_BITS_PER_WIDE_INT)
401 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
402 *lv = 0;
404 else
406 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
407 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
408 *lv = l1 << count;
411 /* Sign extend all bits that are beyond the precision. */
413 signmask = -((prec > HOST_BITS_PER_WIDE_INT
414 ? ((unsigned HOST_WIDE_INT) *hv
415 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
416 : (*lv >> (prec - 1))) & 1);
418 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
420 else if (prec >= HOST_BITS_PER_WIDE_INT)
422 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
423 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
425 else
427 *hv = signmask;
428 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
429 *lv |= signmask << prec;
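/* Editorial example (illustrative, 64-bit HOST_WIDE_INT, prec == 128):
   shifting (l1 == 1, h1 == 0) left by 65 takes the
   count >= HOST_BITS_PER_WIDE_INT branch, giving *hv == 2 and
   *lv == 0, i.e. the value 2^65 -- the bit has moved one position
   into the high word.  */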
433 /* Shift the doubleword integer in L1, H1 right by COUNT places
434 keeping only PREC bits of result. COUNT must be positive.
435 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
436 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
438 void
439 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
440 HOST_WIDE_INT count, unsigned int prec,
441 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
442 int arith)
444 unsigned HOST_WIDE_INT signmask;
446 signmask = (arith
447 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
448 : 0);
450 if (SHIFT_COUNT_TRUNCATED)
451 count %= prec;
453 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
455 /* Shifting by the host word size is undefined according to the
456 ANSI standard, so we must handle this as a special case. */
457 *hv = 0;
458 *lv = 0;
460 else if (count >= HOST_BITS_PER_WIDE_INT)
462 *hv = 0;
463 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
465 else
467 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
468 *lv = ((l1 >> count)
469 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
472 /* Zero / sign extend all bits that are beyond the precision. */
474 if (count >= (HOST_WIDE_INT)prec)
476 *hv = signmask;
477 *lv = signmask;
479 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
481 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
483 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
484 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
486 else
488 *hv = signmask;
489 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
490 *lv |= signmask << (prec - count);
494 /* Rotate the doubleword integer in L1, H1 left by COUNT places
495 keeping only PREC bits of result.
496 Rotate right if COUNT is negative.
497 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 void
500 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
501 HOST_WIDE_INT count, unsigned int prec,
502 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
504 unsigned HOST_WIDE_INT s1l, s2l;
505 HOST_WIDE_INT s1h, s2h;
507 count %= prec;
508 if (count < 0)
509 count += prec;
511 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
512 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
513 *lv = s1l | s2l;
514 *hv = s1h | s2h;
517 /* Rotate the doubleword integer in L1, H1 right by COUNT places
518 keeping only PREC bits of result. COUNT must be positive.
519 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
521 void
522 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
523 HOST_WIDE_INT count, unsigned int prec,
524 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
526 unsigned HOST_WIDE_INT s1l, s2l;
527 HOST_WIDE_INT s1h, s2h;
529 count %= prec;
530 if (count < 0)
531 count += prec;
533 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
534 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
535 *lv = s1l | s2l;
536 *hv = s1h | s2h;
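/* Editorial note (illustrative): both rotate helpers are built from
   the two shift primitives -- rotate-right by COUNT is
   (x >> COUNT) | (x << (PREC - COUNT)) -- using logical shifts
   (ARITH == 0) so no sign bits leak into the reassembled value.  */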
539 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
540 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
541 CODE is a tree code for a kind of division, one of
542 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
543 or EXACT_DIV_EXPR
544 It controls how the quotient is rounded to an integer.
545 Return nonzero if the operation overflows.
546 UNS nonzero says do unsigned division. */
548 int
549 div_and_round_double (enum tree_code code, int uns,
550 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
551 HOST_WIDE_INT hnum_orig,
552 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
553 HOST_WIDE_INT hden_orig,
554 unsigned HOST_WIDE_INT *lquo,
555 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
556 HOST_WIDE_INT *hrem)
558 int quo_neg = 0;
559 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
560 HOST_WIDE_INT den[4], quo[4];
561 int i, j;
562 unsigned HOST_WIDE_INT work;
563 unsigned HOST_WIDE_INT carry = 0;
564 unsigned HOST_WIDE_INT lnum = lnum_orig;
565 HOST_WIDE_INT hnum = hnum_orig;
566 unsigned HOST_WIDE_INT lden = lden_orig;
567 HOST_WIDE_INT hden = hden_orig;
568 int overflow = 0;
570 if (hden == 0 && lden == 0)
571 overflow = 1, lden = 1;
573 /* Calculate quotient sign and convert operands to unsigned. */
574 if (!uns)
576 if (hnum < 0)
578 quo_neg = ~ quo_neg;
579 /* (minimum integer) / (-1) is the only overflow case. */
580 if (neg_double (lnum, hnum, &lnum, &hnum)
581 && ((HOST_WIDE_INT) lden & hden) == -1)
582 overflow = 1;
584 if (hden < 0)
586 quo_neg = ~ quo_neg;
587 neg_double (lden, hden, &lden, &hden);
591 if (hnum == 0 && hden == 0)
592 { /* single precision */
593 *hquo = *hrem = 0;
594 /* This unsigned division rounds toward zero. */
595 *lquo = lnum / lden;
596 goto finish_up;
599 if (hnum == 0)
600 { /* trivial case: dividend < divisor */
601 /* hden != 0 already checked. */
602 *hquo = *lquo = 0;
603 *hrem = hnum;
604 *lrem = lnum;
605 goto finish_up;
608 memset (quo, 0, sizeof quo);
610 memset (num, 0, sizeof num); /* to zero 9th element */
611 memset (den, 0, sizeof den);
613 encode (num, lnum, hnum);
614 encode (den, lden, hden);
616 /* Special code for when the divisor < BASE. */
617 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
619 /* hnum != 0 already checked. */
620 for (i = 4 - 1; i >= 0; i--)
622 work = num[i] + carry * BASE;
623 quo[i] = work / lden;
624 carry = work % lden;
627 else
629 /* Full double precision division,
630 with thanks to Don Knuth's "Seminumerical Algorithms". */
631 int num_hi_sig, den_hi_sig;
632 unsigned HOST_WIDE_INT quo_est, scale;
634 /* Find the highest nonzero divisor digit. */
635 for (i = 4 - 1;; i--)
636 if (den[i] != 0)
638 den_hi_sig = i;
639 break;
642 /* Ensure that the first digit of the divisor is at least BASE/2.
643 This is required by the quotient digit estimation algorithm. */
645 scale = BASE / (den[den_hi_sig] + 1);
646 if (scale > 1)
647 { /* scale divisor and dividend */
648 carry = 0;
649 for (i = 0; i <= 4 - 1; i++)
651 work = (num[i] * scale) + carry;
652 num[i] = LOWPART (work);
653 carry = HIGHPART (work);
656 num[4] = carry;
657 carry = 0;
658 for (i = 0; i <= 4 - 1; i++)
660 work = (den[i] * scale) + carry;
661 den[i] = LOWPART (work);
662 carry = HIGHPART (work);
663 if (den[i] != 0) den_hi_sig = i;
667 num_hi_sig = 4;
669 /* Main loop */
670 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
672 /* Guess the next quotient digit, quo_est, by dividing the first
673 two remaining dividend digits by the high order quotient digit.
674 quo_est is never low and is at most 2 high. */
675 unsigned HOST_WIDE_INT tmp;
677 num_hi_sig = i + den_hi_sig + 1;
678 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
679 if (num[num_hi_sig] != den[den_hi_sig])
680 quo_est = work / den[den_hi_sig];
681 else
682 quo_est = BASE - 1;
684 /* Refine quo_est so it's usually correct, and at most one high. */
685 tmp = work - quo_est * den[den_hi_sig];
686 if (tmp < BASE
687 && (den[den_hi_sig - 1] * quo_est
688 > (tmp * BASE + num[num_hi_sig - 2])))
689 quo_est--;
691 /* Try QUO_EST as the quotient digit, by multiplying the
692 divisor by QUO_EST and subtracting from the remaining dividend.
693 Keep in mind that QUO_EST is the I - 1st digit. */
695 carry = 0;
696 for (j = 0; j <= den_hi_sig; j++)
698 work = quo_est * den[j] + carry;
699 carry = HIGHPART (work);
700 work = num[i + j] - LOWPART (work);
701 num[i + j] = LOWPART (work);
702 carry += HIGHPART (work) != 0;
705 /* If quo_est was high by one, then num[i] went negative and
706 we need to correct things. */
707 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
709 quo_est--;
710 carry = 0; /* add divisor back in */
711 for (j = 0; j <= den_hi_sig; j++)
713 work = num[i + j] + den[j] + carry;
714 carry = HIGHPART (work);
715 num[i + j] = LOWPART (work);
718 num [num_hi_sig] += carry;
721 /* Store the quotient digit. */
722 quo[i] = quo_est;
726 decode (quo, lquo, hquo);
728 finish_up:
729 /* If result is negative, make it so. */
730 if (quo_neg)
731 neg_double (*lquo, *hquo, lquo, hquo);
733 /* Compute trial remainder: rem = num - (quo * den) */
734 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
735 neg_double (*lrem, *hrem, lrem, hrem);
736 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
738 switch (code)
740 case TRUNC_DIV_EXPR:
741 case TRUNC_MOD_EXPR: /* round toward zero */
742 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
743 return overflow;
745 case FLOOR_DIV_EXPR:
746 case FLOOR_MOD_EXPR: /* round toward negative infinity */
747 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
749 /* quo = quo - 1; */
750 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
751 lquo, hquo);
753 else
754 return overflow;
755 break;
757 case CEIL_DIV_EXPR:
758 case CEIL_MOD_EXPR: /* round toward positive infinity */
759 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
761 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
762 lquo, hquo);
764 else
765 return overflow;
766 break;
768 case ROUND_DIV_EXPR:
769 case ROUND_MOD_EXPR: /* round to closest integer */
771 unsigned HOST_WIDE_INT labs_rem = *lrem;
772 HOST_WIDE_INT habs_rem = *hrem;
773 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
774 HOST_WIDE_INT habs_den = hden, htwice;
776 /* Get absolute values. */
777 if (*hrem < 0)
778 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
779 if (hden < 0)
780 neg_double (lden, hden, &labs_den, &habs_den);
782 /* If (2 * abs (lrem) >= abs (lden)) */
783 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
784 labs_rem, habs_rem, &ltwice, &htwice);
786 if (((unsigned HOST_WIDE_INT) habs_den
787 < (unsigned HOST_WIDE_INT) htwice)
788 || (((unsigned HOST_WIDE_INT) habs_den
789 == (unsigned HOST_WIDE_INT) htwice)
790 && (labs_den < ltwice)))
792 if (*hquo < 0)
793 /* quo = quo - 1; */
794 add_double (*lquo, *hquo,
795 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
796 else
797 /* quo = quo + 1; */
798 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
799 lquo, hquo);
801 else
802 return overflow;
804 break;
806 default:
807 abort ();
810 /* Compute true remainder: rem = num - (quo * den) */
811 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
812 neg_double (*lrem, *hrem, lrem, hrem);
813 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
814 return overflow;
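/* Editorial worked example of the rounding modes (illustrative):
   dividing num = -8 by den = 3 gives a trial quotient of -2 with
   remainder -2, and then
     TRUNC_DIV_EXPR:  -2  (round toward zero)
     FLOOR_DIV_EXPR:  -3  (negative quotient, nonzero remainder)
     CEIL_DIV_EXPR:   -2  (adjusts only nonnegative quotients)
     ROUND_DIV_EXPR:  -3  (2 * |rem| = 4 exceeds |den| = 3)
   The matching *_MOD_EXPR remainder is then recomputed at the end
   as num - quo * den.  */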
817 /* Return true if built-in mathematical function specified by CODE
818 preserves the sign of its argument, i.e. -f(x) == f(-x). */
820 static bool
821 negate_mathfn_p (enum built_in_function code)
823 switch (code)
825 case BUILT_IN_ASIN:
826 case BUILT_IN_ASINF:
827 case BUILT_IN_ASINL:
828 case BUILT_IN_ATAN:
829 case BUILT_IN_ATANF:
830 case BUILT_IN_ATANL:
831 case BUILT_IN_SIN:
832 case BUILT_IN_SINF:
833 case BUILT_IN_SINL:
834 case BUILT_IN_TAN:
835 case BUILT_IN_TANF:
836 case BUILT_IN_TANL:
837 return true;
839 default:
840 break;
842 return false;
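/* Editorial note: every function listed above is odd, i.e.
   f(-x) == -f(x), which is exactly what lets fold rewrite -f(x) as
   f(-x).  Even functions such as cos, or functions such as exp that
   are neither odd nor even, do not qualify.  */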
845 /* Determine whether an expression T can be cheaply negated using
846 the function negate_expr. */
848 static bool
849 negate_expr_p (tree t)
851 unsigned HOST_WIDE_INT val;
852 unsigned int prec;
853 tree type;
855 if (t == 0)
856 return false;
858 type = TREE_TYPE (t);
860 STRIP_SIGN_NOPS (t);
861 switch (TREE_CODE (t))
863 case INTEGER_CST:
864 if (TYPE_UNSIGNED (type) || ! flag_trapv)
865 return true;
867 /* Check that -CST will not overflow type. */
868 prec = TYPE_PRECISION (type);
869 if (prec > HOST_BITS_PER_WIDE_INT)
871 if (TREE_INT_CST_LOW (t) != 0)
872 return true;
873 prec -= HOST_BITS_PER_WIDE_INT;
874 val = TREE_INT_CST_HIGH (t);
876 else
877 val = TREE_INT_CST_LOW (t);
878 if (prec < HOST_BITS_PER_WIDE_INT)
879 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
880 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
882 case REAL_CST:
883 case NEGATE_EXPR:
884 return true;
886 case COMPLEX_CST:
887 return negate_expr_p (TREE_REALPART (t))
888 && negate_expr_p (TREE_IMAGPART (t));
890 case PLUS_EXPR:
891 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
892 return false;
893 /* -(A + B) -> (-B) - A. */
894 if (negate_expr_p (TREE_OPERAND (t, 1))
895 && reorder_operands_p (TREE_OPERAND (t, 0),
896 TREE_OPERAND (t, 1)))
897 return true;
898 /* -(A + B) -> (-A) - B. */
899 return negate_expr_p (TREE_OPERAND (t, 0));
901 case MINUS_EXPR:
902 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
903 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
904 && reorder_operands_p (TREE_OPERAND (t, 0),
905 TREE_OPERAND (t, 1));
907 case MULT_EXPR:
908 if (TYPE_UNSIGNED (TREE_TYPE (t)))
909 break;
911 /* Fall through. */
913 case RDIV_EXPR:
914 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
915 return negate_expr_p (TREE_OPERAND (t, 1))
916 || negate_expr_p (TREE_OPERAND (t, 0));
917 break;
919 case NOP_EXPR:
920 /* Negate -((double)float) as (double)(-float). */
921 if (TREE_CODE (type) == REAL_TYPE)
923 tree tem = strip_float_extensions (t);
924 if (tem != t)
925 return negate_expr_p (tem);
927 break;
929 case CALL_EXPR:
930 /* Negate -f(x) as f(-x). */
931 if (negate_mathfn_p (builtin_mathfn_code (t)))
932 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
933 break;
935 case RSHIFT_EXPR:
936 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
937 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
939 tree op1 = TREE_OPERAND (t, 1);
940 if (TREE_INT_CST_HIGH (op1) == 0
941 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
942 == TREE_INT_CST_LOW (op1))
943 return true;
945 break;
947 default:
948 break;
950 return false;
953 /* Given T, an expression, return the negation of T. Allow for T to be
954 null, in which case return null. */
956 static tree
957 negate_expr (tree t)
959 tree type;
960 tree tem;
962 if (t == 0)
963 return 0;
965 type = TREE_TYPE (t);
966 STRIP_SIGN_NOPS (t);
968 switch (TREE_CODE (t))
970 case INTEGER_CST:
971 tem = fold_negate_const (t, type);
972 if (! TREE_OVERFLOW (tem)
973 || TYPE_UNSIGNED (type)
974 || ! flag_trapv)
975 return tem;
976 break;
978 case REAL_CST:
979 tem = fold_negate_const (t, type);
980 /* Two's complement FP formats, such as c4x, may overflow. */
981 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
982 return fold_convert (type, tem);
983 break;
985 case COMPLEX_CST:
987 tree rpart = negate_expr (TREE_REALPART (t));
988 tree ipart = negate_expr (TREE_IMAGPART (t));
990 if ((TREE_CODE (rpart) == REAL_CST
991 && TREE_CODE (ipart) == REAL_CST)
992 || (TREE_CODE (rpart) == INTEGER_CST
993 && TREE_CODE (ipart) == INTEGER_CST))
994 return build_complex (type, rpart, ipart);
996 break;
998 case NEGATE_EXPR:
999 return fold_convert (type, TREE_OPERAND (t, 0));
1001 case PLUS_EXPR:
1002 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1004 /* -(A + B) -> (-B) - A. */
1005 if (negate_expr_p (TREE_OPERAND (t, 1))
1006 && reorder_operands_p (TREE_OPERAND (t, 0),
1007 TREE_OPERAND (t, 1)))
1009 tem = negate_expr (TREE_OPERAND (t, 1));
1010 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1011 tem, TREE_OPERAND (t, 0)));
1012 return fold_convert (type, tem);
1015 /* -(A + B) -> (-A) - B. */
1016 if (negate_expr_p (TREE_OPERAND (t, 0)))
1018 tem = negate_expr (TREE_OPERAND (t, 0));
1019 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1020 tem, TREE_OPERAND (t, 1)));
1021 return fold_convert (type, tem);
1024 break;
1026 case MINUS_EXPR:
1027 /* - (A - B) -> B - A */
1028 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1029 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1030 return fold_convert (type,
1031 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1032 TREE_OPERAND (t, 1),
1033 TREE_OPERAND (t, 0))));
1034 break;
1036 case MULT_EXPR:
1037 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1038 break;
1040 /* Fall through. */
1042 case RDIV_EXPR:
1043 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1045 tem = TREE_OPERAND (t, 1);
1046 if (negate_expr_p (tem))
1047 return fold_convert (type,
1048 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1049 TREE_OPERAND (t, 0),
1050 negate_expr (tem))));
1051 tem = TREE_OPERAND (t, 0);
1052 if (negate_expr_p (tem))
1053 return fold_convert (type,
1054 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1055 negate_expr (tem),
1056 TREE_OPERAND (t, 1))));
1058 break;
1060 case NOP_EXPR:
1061 /* Convert -((double)float) into (double)(-float). */
1062 if (TREE_CODE (type) == REAL_TYPE)
1064 tem = strip_float_extensions (t);
1065 if (tem != t && negate_expr_p (tem))
1066 return fold_convert (type, negate_expr (tem));
1068 break;
1070 case CALL_EXPR:
1071 /* Negate -f(x) as f(-x). */
1072 if (negate_mathfn_p (builtin_mathfn_code (t))
1073 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1075 tree fndecl, arg, arglist;
1077 fndecl = get_callee_fndecl (t);
1078 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1079 arglist = build_tree_list (NULL_TREE, arg);
1080 return build_function_call_expr (fndecl, arglist);
1082 break;
1084 case RSHIFT_EXPR:
1085 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1086 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1088 tree op1 = TREE_OPERAND (t, 1);
1089 if (TREE_INT_CST_HIGH (op1) == 0
1090 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1091 == TREE_INT_CST_LOW (op1))
1093 tree ntype = TYPE_UNSIGNED (type)
1094 ? lang_hooks.types.signed_type (type)
1095 : lang_hooks.types.unsigned_type (type);
1096 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1097 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1098 return fold_convert (type, temp);
1101 break;
1103 default:
1104 break;
1107 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1108 return fold_convert (type, tem);
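/* Editorial example (illustrative): for t = x + 3 (integral, or
   floating point under -funsafe-math-optimizations), the PLUS_EXPR
   case rewrites -(x + 3) as (-3) - x, folding the negation into the
   constant instead of wrapping the sum in a NEGATE_EXPR.  */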
1111 /* Split a tree IN into constant, literal, and variable parts that could be
1112 combined with CODE to make IN. "constant" means an expression with
1113 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1114 commutative arithmetic operation. Store the constant part into *CONP,
1115 the literal in *LITP and return the variable part. If a part isn't
1116 present, set it to null. If the tree does not decompose in this way,
1117 return the entire tree as the variable part and the other parts as null.
1119 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1120 case, we negate an operand that was subtracted. Except if it is a
1121 literal for which we use *MINUS_LITP instead.
1123 If NEGATE_P is true, we are negating all of IN, again except a literal
1124 for which we use *MINUS_LITP instead.
1126 If IN is itself a literal or constant, return it as appropriate.
1128 Note that we do not guarantee that any of the three values will be the
1129 same type as IN, but they will have the same signedness and mode. */
1131 static tree
1132 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1133 tree *minus_litp, int negate_p)
1135 tree var = 0;
1137 *conp = 0;
1138 *litp = 0;
1139 *minus_litp = 0;
1141 /* Strip any conversions that don't change the machine mode or signedness. */
1142 STRIP_SIGN_NOPS (in);
1144 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1145 *litp = in;
1146 else if (TREE_CODE (in) == code
1147 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1148 /* We can associate addition and subtraction together (even
1149 though the C standard doesn't say so) for integers because
1150 the value is not affected. For reals, the value might be
1151 affected, so we can't. */
1152 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1153 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1155 tree op0 = TREE_OPERAND (in, 0);
1156 tree op1 = TREE_OPERAND (in, 1);
1157 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1158 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1160 /* First see if either of the operands is a literal, then a constant. */
1161 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1162 *litp = op0, op0 = 0;
1163 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1164 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1166 if (op0 != 0 && TREE_CONSTANT (op0))
1167 *conp = op0, op0 = 0;
1168 else if (op1 != 0 && TREE_CONSTANT (op1))
1169 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1171 /* If we haven't dealt with either operand, this is not a case we can
1172 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1173 if (op0 != 0 && op1 != 0)
1174 var = in;
1175 else if (op0 != 0)
1176 var = op0;
1177 else
1178 var = op1, neg_var_p = neg1_p;
1180 /* Now do any needed negations. */
1181 if (neg_litp_p)
1182 *minus_litp = *litp, *litp = 0;
1183 if (neg_conp_p)
1184 *conp = negate_expr (*conp);
1185 if (neg_var_p)
1186 var = negate_expr (var);
1188 else if (TREE_CONSTANT (in))
1189 *conp = in;
1190 else
1191 var = in;
1193 if (negate_p)
1195 if (*litp)
1196 *minus_litp = *litp, *litp = 0;
1197 else if (*minus_litp)
1198 *litp = *minus_litp, *minus_litp = 0;
1199 *conp = negate_expr (*conp);
1200 var = negate_expr (var);
1203 return var;
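/* Editorial example (illustrative): splitting IN = (x + 5) with
   CODE == PLUS_EXPR yields *LITP == 5, *CONP == 0 and returns the
   variable part x; splitting (x - 5) instead routes the literal to
   *MINUS_LITP, recording that it was subtracted.  */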
1206 /* Re-associate trees split by the above function. T1 and T2 are either
1207 expressions to associate or null. Return the new expression, if any. If
1208 we build an operation, do it in TYPE and with CODE. */
1210 static tree
1211 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1213 if (t1 == 0)
1214 return t2;
1215 else if (t2 == 0)
1216 return t1;
1218 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1219 try to fold this since we will have infinite recursion. But do
1220 deal with any NEGATE_EXPRs. */
1221 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1222 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1224 if (code == PLUS_EXPR)
1226 if (TREE_CODE (t1) == NEGATE_EXPR)
1227 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1228 fold_convert (type, TREE_OPERAND (t1, 0)));
1229 else if (TREE_CODE (t2) == NEGATE_EXPR)
1230 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1231 fold_convert (type, TREE_OPERAND (t2, 0)));
1233 return build2 (code, type, fold_convert (type, t1),
1234 fold_convert (type, t2));
1237 return fold (build2 (code, type, fold_convert (type, t1),
1238 fold_convert (type, t2)));
1241 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1242 to produce a new constant.
1244 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1246 tree
1247 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1249 unsigned HOST_WIDE_INT int1l, int2l;
1250 HOST_WIDE_INT int1h, int2h;
1251 unsigned HOST_WIDE_INT low;
1252 HOST_WIDE_INT hi;
1253 unsigned HOST_WIDE_INT garbagel;
1254 HOST_WIDE_INT garbageh;
1255 tree t;
1256 tree type = TREE_TYPE (arg1);
1257 int uns = TYPE_UNSIGNED (type);
1258 int is_sizetype
1259 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1260 int overflow = 0;
1261 int no_overflow = 0;
1263 int1l = TREE_INT_CST_LOW (arg1);
1264 int1h = TREE_INT_CST_HIGH (arg1);
1265 int2l = TREE_INT_CST_LOW (arg2);
1266 int2h = TREE_INT_CST_HIGH (arg2);
1268 switch (code)
1270 case BIT_IOR_EXPR:
1271 low = int1l | int2l, hi = int1h | int2h;
1272 break;
1274 case BIT_XOR_EXPR:
1275 low = int1l ^ int2l, hi = int1h ^ int2h;
1276 break;
1278 case BIT_AND_EXPR:
1279 low = int1l & int2l, hi = int1h & int2h;
1280 break;
1282 case RSHIFT_EXPR:
1283 int2l = -int2l;
1284 case LSHIFT_EXPR:
1285 /* It's unclear from the C standard whether shifts can overflow.
1286 The following code ignores overflow; perhaps a C standard
1287 interpretation ruling is needed. */
1288 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1289 &low, &hi, !uns);
1290 no_overflow = 1;
1291 break;
1293 case RROTATE_EXPR:
1294 int2l = - int2l;
1295 case LROTATE_EXPR:
1296 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1297 &low, &hi);
1298 break;
1300 case PLUS_EXPR:
1301 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1302 break;
1304 case MINUS_EXPR:
1305 neg_double (int2l, int2h, &low, &hi);
1306 add_double (int1l, int1h, low, hi, &low, &hi);
1307 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1308 break;
1310 case MULT_EXPR:
1311 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1312 break;
1314 case TRUNC_DIV_EXPR:
1315 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1316 case EXACT_DIV_EXPR:
1317 /* This is a shortcut for a common special case. */
1318 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1319 && ! TREE_CONSTANT_OVERFLOW (arg1)
1320 && ! TREE_CONSTANT_OVERFLOW (arg2)
1321 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1323 if (code == CEIL_DIV_EXPR)
1324 int1l += int2l - 1;
1326 low = int1l / int2l, hi = 0;
1327 break;
1330 /* ... fall through ... */
1332 case ROUND_DIV_EXPR:
1333 if (int2h == 0 && int2l == 1)
1335 low = int1l, hi = int1h;
1336 break;
1338 if (int1l == int2l && int1h == int2h
1339 && ! (int1l == 0 && int1h == 0))
1341 low = 1, hi = 0;
1342 break;
1344 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1345 &low, &hi, &garbagel, &garbageh);
1346 break;
1348 case TRUNC_MOD_EXPR:
1349 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1350 /* This is a shortcut for a common special case. */
1351 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1352 && ! TREE_CONSTANT_OVERFLOW (arg1)
1353 && ! TREE_CONSTANT_OVERFLOW (arg2)
1354 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1356 if (code == CEIL_MOD_EXPR)
1357 int1l += int2l - 1;
1358 low = int1l % int2l, hi = 0;
1359 break;
1362 /* ... fall through ... */
1364 case ROUND_MOD_EXPR:
1365 overflow = div_and_round_double (code, uns,
1366 int1l, int1h, int2l, int2h,
1367 &garbagel, &garbageh, &low, &hi);
1368 break;
1370 case MIN_EXPR:
1371 case MAX_EXPR:
1372 if (uns)
1373 low = (((unsigned HOST_WIDE_INT) int1h
1374 < (unsigned HOST_WIDE_INT) int2h)
1375 || (((unsigned HOST_WIDE_INT) int1h
1376 == (unsigned HOST_WIDE_INT) int2h)
1377 && int1l < int2l));
1378 else
1379 low = (int1h < int2h
1380 || (int1h == int2h && int1l < int2l));
1382 if (low == (code == MIN_EXPR))
1383 low = int1l, hi = int1h;
1384 else
1385 low = int2l, hi = int2h;
1386 break;
1388 default:
1389 abort ();
1392 /* If this is for a sizetype, the value fits in a single (signed)
1393 HOST_WIDE_INT word, and there is no overflow, use size_int since it
1394 caches constants. */
1395 if (is_sizetype
1396 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1397 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1398 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1399 return size_int_type_wide (low, type);
1400 else
1402 t = build_int_2 (low, hi);
1403 TREE_TYPE (t) = TREE_TYPE (arg1);
1406 TREE_OVERFLOW (t)
1407 = ((notrunc
1408 ? (!uns || is_sizetype) && overflow
1409 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1410 && ! no_overflow))
1411 | TREE_OVERFLOW (arg1)
1412 | TREE_OVERFLOW (arg2));
1414 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1415 So check if force_fit_type truncated the value. */
1416 if (is_sizetype
1417 && ! TREE_OVERFLOW (t)
1418 && (TREE_INT_CST_HIGH (t) != hi
1419 || TREE_INT_CST_LOW (t) != low))
1420 TREE_OVERFLOW (t) = 1;
1422 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1423 | TREE_CONSTANT_OVERFLOW (arg1)
1424 | TREE_CONSTANT_OVERFLOW (arg2));
1425 return t;
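/* Editorial note (illustrative): for two INTEGER_CSTs of a 32-bit
   signed type, int_const_binop (PLUS_EXPR, a, b, 0) adds the
   double-word values with add_double; with NOTRUNC == 0,
   force_fit_type then truncates the result back to 32 bits, and the
   TREE_OVERFLOW computation records any signed wraparound, ORed with
   the overflow flags already set on the operands.  */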
1428 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1429 constant. We assume ARG1 and ARG2 have the same data type, or at least
1430 are the same kind of constant and the same machine mode.
1432 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1434 static tree
1435 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1437 STRIP_NOPS (arg1);
1438 STRIP_NOPS (arg2);
1440 if (TREE_CODE (arg1) == INTEGER_CST)
1441 return int_const_binop (code, arg1, arg2, notrunc);
1443 if (TREE_CODE (arg1) == REAL_CST)
1445 enum machine_mode mode;
1446 REAL_VALUE_TYPE d1;
1447 REAL_VALUE_TYPE d2;
1448 REAL_VALUE_TYPE value;
1449 tree t, type;
1451 d1 = TREE_REAL_CST (arg1);
1452 d2 = TREE_REAL_CST (arg2);
1454 type = TREE_TYPE (arg1);
1455 mode = TYPE_MODE (type);
1457 /* Don't perform operation if we honor signaling NaNs and
1458 either operand is a NaN. */
1459 if (HONOR_SNANS (mode)
1460 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1461 return NULL_TREE;
1463 /* Don't perform operation if it would raise a division
1464 by zero exception. */
1465 if (code == RDIV_EXPR
1466 && REAL_VALUES_EQUAL (d2, dconst0)
1467 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1468 return NULL_TREE;
1470 /* If either operand is a NaN, just return it. Otherwise, set up
1471 for floating-point trap; we return an overflow. */
1472 if (REAL_VALUE_ISNAN (d1))
1473 return arg1;
1474 else if (REAL_VALUE_ISNAN (d2))
1475 return arg2;
1477 REAL_ARITHMETIC (value, code, d1, d2);
1479 t = build_real (type, real_value_truncate (mode, value));
1481 TREE_OVERFLOW (t)
1482 = (force_fit_type (t, 0)
1483 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1484 TREE_CONSTANT_OVERFLOW (t)
1485 = TREE_OVERFLOW (t)
1486 | TREE_CONSTANT_OVERFLOW (arg1)
1487 | TREE_CONSTANT_OVERFLOW (arg2);
1488 return t;
1490 if (TREE_CODE (arg1) == COMPLEX_CST)
1492 tree type = TREE_TYPE (arg1);
1493 tree r1 = TREE_REALPART (arg1);
1494 tree i1 = TREE_IMAGPART (arg1);
1495 tree r2 = TREE_REALPART (arg2);
1496 tree i2 = TREE_IMAGPART (arg2);
1497 tree t;
1499 switch (code)
1501 case PLUS_EXPR:
1502 t = build_complex (type,
1503 const_binop (PLUS_EXPR, r1, r2, notrunc),
1504 const_binop (PLUS_EXPR, i1, i2, notrunc));
1505 break;
1507 case MINUS_EXPR:
1508 t = build_complex (type,
1509 const_binop (MINUS_EXPR, r1, r2, notrunc),
1510 const_binop (MINUS_EXPR, i1, i2, notrunc));
1511 break;
1513 case MULT_EXPR:
1514 t = build_complex (type,
1515 const_binop (MINUS_EXPR,
1516 const_binop (MULT_EXPR,
1517 r1, r2, notrunc),
1518 const_binop (MULT_EXPR,
1519 i1, i2, notrunc),
1520 notrunc),
1521 const_binop (PLUS_EXPR,
1522 const_binop (MULT_EXPR,
1523 r1, i2, notrunc),
1524 const_binop (MULT_EXPR,
1525 i1, r2, notrunc),
1526 notrunc));
1527 break;
1529 case RDIV_EXPR:
1531 tree magsquared
1532 = const_binop (PLUS_EXPR,
1533 const_binop (MULT_EXPR, r2, r2, notrunc),
1534 const_binop (MULT_EXPR, i2, i2, notrunc),
1535 notrunc);
1537 t = build_complex (type,
1538 const_binop
1539 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1540 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1541 const_binop (PLUS_EXPR,
1542 const_binop (MULT_EXPR, r1, r2,
1543 notrunc),
1544 const_binop (MULT_EXPR, i1, i2,
1545 notrunc),
1546 notrunc),
1547 magsquared, notrunc),
1548 const_binop
1549 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1550 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1551 const_binop (MINUS_EXPR,
1552 const_binop (MULT_EXPR, i1, r2,
1553 notrunc),
1554 const_binop (MULT_EXPR, r1, i2,
1555 notrunc),
1556 notrunc),
1557 magsquared, notrunc));
1559 break;
1561 default:
1562 abort ();
1564 return t;
1566 return 0;
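/* Editorial note: the RDIV_EXPR case above is the textbook identity
     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)
   with MAGSQUARED holding the common denominator r2*r2 + i2*i2.  */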
1569 /* These are the hash table functions for the hash table of INTEGER_CST
1570 nodes of a sizetype. */
1572 /* Return the hash code for X, an INTEGER_CST. */
1574 static hashval_t
1575 size_htab_hash (const void *x)
1577 tree t = (tree) x;
1579 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1580 ^ htab_hash_pointer (TREE_TYPE (t))
1581 ^ (TREE_OVERFLOW (t) << 20));
1584 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1585 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1587 static int
1588 size_htab_eq (const void *x, const void *y)
1590 tree xt = (tree) x;
1591 tree yt = (tree) y;
1593 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1594 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1595 && TREE_TYPE (xt) == TREE_TYPE (yt)
1596 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1599 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits
1600 are given by NUMBER, and whose type is the sizetype represented by KIND. */
1602 tree
1603 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1605 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1608 /* Likewise, but the desired type is specified explicitly. */
1610 static GTY (()) tree new_const;
1611 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1612 htab_t size_htab;
1614 tree
1615 size_int_type_wide (HOST_WIDE_INT number, tree type)
1617 void **slot;
1619 if (size_htab == 0)
1621 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1622 new_const = make_node (INTEGER_CST);
1625 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1626 hash table, we return the value from the hash table. Otherwise, we
1627 place that in the hash table and make a new node for the next time. */
1628 TREE_INT_CST_LOW (new_const) = number;
1629 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1630 TREE_TYPE (new_const) = type;
1631 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1632 = force_fit_type (new_const, 0);
1634 slot = htab_find_slot (size_htab, new_const, INSERT);
1635 if (*slot == 0)
1637 tree t = new_const;
1639 *slot = new_const;
1640 new_const = make_node (INTEGER_CST);
1641 return t;
1643 else
1644 return (tree) *slot;
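#if 0
/* Hedged usage sketch (editorial, not compiled; the helper name is
   hypothetical): the hash table above interns sizetype constants, so
   equal requests return the identical tree node -- pointer equality,
   not just value equality.  */
static void
size_int_sharing_example (void)
{
  tree a = size_int_type_wide (8, sizetype);
  tree b = size_int_type_wide (8, sizetype);
  if (a != b)
    abort ();
}
#endif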
1647 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1648 is a tree code. The type of the result is taken from the operands.
1649 Both must be the same integer type, and it must be a sizetype.
1650 If the operands are constant, so is the result. */
1652 tree
1653 size_binop (enum tree_code code, tree arg0, tree arg1)
1655 tree type = TREE_TYPE (arg0);
1657 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1658 || type != TREE_TYPE (arg1))
1659 abort ();
1661 /* Handle the special case of two integer constants faster. */
1662 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1664 /* And some specific cases even faster than that. */
1665 if (code == PLUS_EXPR && integer_zerop (arg0))
1666 return arg1;
1667 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1668 && integer_zerop (arg1))
1669 return arg0;
1670 else if (code == MULT_EXPR && integer_onep (arg0))
1671 return arg1;
1673 /* Handle general case of two integer constants. */
1674 return int_const_binop (code, arg0, arg1, 0);
1677 if (arg0 == error_mark_node || arg1 == error_mark_node)
1678 return error_mark_node;
1680 return fold (build2 (code, type, arg0, arg1));
1683 /* Given two values, either both of sizetype or both of bitsizetype,
1684 compute the difference between the two values. Return the value
1685 in signed type corresponding to the type of the operands. */
1687 tree
1688 size_diffop (tree arg0, tree arg1)
1690 tree type = TREE_TYPE (arg0);
1691 tree ctype;
1693 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1694 || type != TREE_TYPE (arg1))
1695 abort ();
1697 /* If the type is already signed, just do the simple thing. */
1698 if (!TYPE_UNSIGNED (type))
1699 return size_binop (MINUS_EXPR, arg0, arg1);
1701 ctype = (type == bitsizetype || type == ubitsizetype
1702 ? sbitsizetype : ssizetype);
1704 /* If either operand is not a constant, do the conversions to the signed
1705 type and subtract. The hardware will do the right thing with any
1706 overflow in the subtraction. */
1707 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1708 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1709 fold_convert (ctype, arg1));
1711 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1712 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1713 overflow) and negate (which can't either). Special-case a result
1714 of zero while we're here. */
1715 if (tree_int_cst_equal (arg0, arg1))
1716 return fold_convert (ctype, integer_zero_node);
1717 else if (tree_int_cst_lt (arg1, arg0))
1718 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1719 else
1720 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1721 fold_convert (ctype, size_binop (MINUS_EXPR,
1722 arg1, arg0)));
1726 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1727 type TYPE. If no simplification can be done, return NULL_TREE. */
1729 static tree
1730 fold_convert_const (enum tree_code code, tree type, tree arg1)
1732 int overflow = 0;
1733 tree t;
1735 if (TREE_TYPE (arg1) == type)
1736 return arg1;
1738 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1740 if (TREE_CODE (arg1) == INTEGER_CST)
1742 /* If we would build a constant wider than GCC supports,
1743 leave the conversion unfolded. */
1744 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1745 return NULL_TREE;
1747 /* If we are trying to make a sizetype for a small integer, use
1748 size_int to pick up cached types to reduce duplicate nodes. */
1749 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1750 && !TREE_CONSTANT_OVERFLOW (arg1)
1751 && compare_tree_int (arg1, 10000) < 0)
1752 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1754 /* Given an integer constant, make new constant with new type,
1755 appropriately sign-extended or truncated. */
1756 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1757 TREE_INT_CST_HIGH (arg1));
1758 TREE_TYPE (t) = type;
1759 /* Indicate an overflow if (1) ARG1 already overflowed,
1760 or (2) force_fit_type indicates an overflow.
1761 Tell force_fit_type that an overflow has already occurred
1762 if ARG1 is a too-large unsigned value and T is signed.
1763 But don't indicate an overflow if converting a pointer. */
1764 TREE_OVERFLOW (t)
1765 = ((force_fit_type (t,
1766 (TREE_INT_CST_HIGH (arg1) < 0
1767 && (TYPE_UNSIGNED (type)
1768 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1769 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1770 || TREE_OVERFLOW (arg1));
1771 TREE_CONSTANT_OVERFLOW (t)
1772 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1773 return t;
1775 else if (TREE_CODE (arg1) == REAL_CST)
1777 /* The following code implements the floating point to integer
1778 conversion rules required by the Java Language Specification,
1779 that IEEE NaNs are mapped to zero and values that overflow
1780 the target precision saturate, i.e. values greater than
1781 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1782 are mapped to INT_MIN. These semantics are allowed by the
1783 C and C++ standards that simply state that the behavior of
1784 FP-to-integer conversion is unspecified upon overflow. */
1786 HOST_WIDE_INT high, low;
1788 REAL_VALUE_TYPE r;
1789 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1791 switch (code)
1793 case FIX_TRUNC_EXPR:
1794 real_trunc (&r, VOIDmode, &x);
1795 break;
1797 case FIX_CEIL_EXPR:
1798 real_ceil (&r, VOIDmode, &x);
1799 break;
1801 case FIX_FLOOR_EXPR:
1802 real_floor (&r, VOIDmode, &x);
1803 break;
1805 case FIX_ROUND_EXPR:
1806 real_round (&r, VOIDmode, &x);
1807 break;
1809 default:
1810 abort ();
1813 /* If R is NaN, return zero and show we have an overflow. */
1814 if (REAL_VALUE_ISNAN (r))
1816 overflow = 1;
1817 high = 0;
1818 low = 0;
1821 /* See if R is less than the lower bound or greater than the
1822 upper bound. */
1824 if (! overflow)
1826 tree lt = TYPE_MIN_VALUE (type);
1827 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1828 if (REAL_VALUES_LESS (r, l))
1830 overflow = 1;
1831 high = TREE_INT_CST_HIGH (lt);
1832 low = TREE_INT_CST_LOW (lt);
1836 if (! overflow)
1838 tree ut = TYPE_MAX_VALUE (type);
1839 if (ut)
1841 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1842 if (REAL_VALUES_LESS (u, r))
1844 overflow = 1;
1845 high = TREE_INT_CST_HIGH (ut);
1846 low = TREE_INT_CST_LOW (ut);
1851 if (! overflow)
1852 REAL_VALUE_TO_INT (&low, &high, r);
1854 t = build_int_2 (low, high);
1855 TREE_TYPE (t) = type;
1856 TREE_OVERFLOW (t)
1857 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1860 return t;
1863 else if (TREE_CODE (type) == REAL_TYPE)
1865 if (TREE_CODE (arg1) == INTEGER_CST)
1866 return build_real_from_int_cst (type, arg1);
1867 if (TREE_CODE (arg1) == REAL_CST)
1869 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1871 /* We make a copy of ARG1 so that we don't modify an
1872 existing constant tree. */
1873 t = copy_node (arg1);
1874 TREE_TYPE (t) = type;
1875 return t;
1878 t = build_real (type,
1879 real_value_truncate (TYPE_MODE (type),
1880 TREE_REAL_CST (arg1)));
1882 TREE_OVERFLOW (t)
1883 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1884 TREE_CONSTANT_OVERFLOW (t)
1885 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1886 return t;
1889 return NULL_TREE;
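/* Editorial worked example (illustrative): converting a REAL_CST
   larger than TYPE_MAX_VALUE of a 32-bit signed type saturates to
   that maximum with TREE_OVERFLOW set, and a NaN input converts to
   zero, again with the overflow flag set -- the Java-style saturating
   semantics described in the comment above.  */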
1892 /* Convert expression ARG to type TYPE. Used by the middle-end for
1893 simple conversions in preference to calling the front-end's convert. */
1895 tree
1896 fold_convert (tree type, tree arg)
1898 tree orig = TREE_TYPE (arg);
1899 tree tem;
1901 if (type == orig)
1902 return arg;
1904 if (TREE_CODE (arg) == ERROR_MARK
1905 || TREE_CODE (type) == ERROR_MARK
1906 || TREE_CODE (orig) == ERROR_MARK)
1907 return error_mark_node;
1909 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1910 return fold (build1 (NOP_EXPR, type, arg));
1912 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1913 || TREE_CODE (type) == OFFSET_TYPE)
1915 if (TREE_CODE (arg) == INTEGER_CST)
1917 tem = fold_convert_const (NOP_EXPR, type, arg);
1918 if (tem != NULL_TREE)
1919 return tem;
1921 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1922 || TREE_CODE (orig) == OFFSET_TYPE)
1923 return fold (build1 (NOP_EXPR, type, arg));
1924 if (TREE_CODE (orig) == COMPLEX_TYPE)
1926 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1927 return fold_convert (type, tem);
1929 if (TREE_CODE (orig) == VECTOR_TYPE
1930 && GET_MODE_SIZE (TYPE_MODE (type))
1931 == GET_MODE_SIZE (TYPE_MODE (orig)))
1932 return fold (build1 (NOP_EXPR, type, arg));
1934 else if (TREE_CODE (type) == REAL_TYPE)
1936 if (TREE_CODE (arg) == INTEGER_CST)
1938 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1939 if (tem != NULL_TREE)
1940 return tem;
1942 else if (TREE_CODE (arg) == REAL_CST)
1944 tem = fold_convert_const (NOP_EXPR, type, arg);
1945 if (tem != NULL_TREE)
1946 return tem;
1949 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1950 return fold (build1 (FLOAT_EXPR, type, arg));
1951 if (TREE_CODE (orig) == REAL_TYPE)
1952 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1953 type, arg));
1954 if (TREE_CODE (orig) == COMPLEX_TYPE)
1956 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1957 return fold_convert (type, tem);
1960 else if (TREE_CODE (type) == COMPLEX_TYPE)
1962 if (INTEGRAL_TYPE_P (orig)
1963 || POINTER_TYPE_P (orig)
1964 || TREE_CODE (orig) == REAL_TYPE)
1965 return build2 (COMPLEX_EXPR, type,
1966 fold_convert (TREE_TYPE (type), arg),
1967 fold_convert (TREE_TYPE (type), integer_zero_node));
1968 if (TREE_CODE (orig) == COMPLEX_TYPE)
1970 tree rpart, ipart;
1972 if (TREE_CODE (arg) == COMPLEX_EXPR)
1974 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1975 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1976 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1979 arg = save_expr (arg);
1980 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1981 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1982 rpart = fold_convert (TREE_TYPE (type), rpart);
1983 ipart = fold_convert (TREE_TYPE (type), ipart);
1984 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1987 else if (TREE_CODE (type) == VECTOR_TYPE)
1989 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1990 && GET_MODE_SIZE (TYPE_MODE (type))
1991 == GET_MODE_SIZE (TYPE_MODE (orig)))
1992 return fold (build1 (NOP_EXPR, type, arg));
1993 if (TREE_CODE (orig) == VECTOR_TYPE
1994 && GET_MODE_SIZE (TYPE_MODE (type))
1995 == GET_MODE_SIZE (TYPE_MODE (orig)))
1996 return fold (build1 (NOP_EXPR, type, arg));
1998 else if (VOID_TYPE_P (type))
1999 return fold (build1 (CONVERT_EXPR, type, arg));
2000 abort ();
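/* A minimal usage sketch (hypothetical caller, not part of the fold
   API itself): converting an INTEGER_CST to a REAL_TYPE through
   fold_convert yields a REAL_CST directly, with no front-end convert
   call involved.  double_type_node is the global tree for C's double.  */
static tree ATTRIBUTE_UNUSED
example_fold_convert_use (tree int_cst)
{
  return fold_convert (double_type_node, int_cst);
}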
2003 /* Return an expr equal to X but certainly not valid as an lvalue. */
2005 tree
2006 non_lvalue (tree x)
2008 /* We only need to wrap lvalue tree codes. */
2009 switch (TREE_CODE (x))
2011 case VAR_DECL:
2012 case PARM_DECL:
2013 case RESULT_DECL:
2014 case LABEL_DECL:
2015 case FUNCTION_DECL:
2016 case SSA_NAME:
2018 case COMPONENT_REF:
2019 case INDIRECT_REF:
2020 case ARRAY_REF:
2021 case BIT_FIELD_REF:
2022 case BUFFER_REF:
2023 case ARRAY_RANGE_REF:
2024 case VTABLE_REF:
2026 case REALPART_EXPR:
2027 case IMAGPART_EXPR:
2028 case PREINCREMENT_EXPR:
2029 case PREDECREMENT_EXPR:
2030 case SAVE_EXPR:
2031 case UNSAVE_EXPR:
2032 case TRY_CATCH_EXPR:
2033 case WITH_CLEANUP_EXPR:
2034 case COMPOUND_EXPR:
2035 case MODIFY_EXPR:
2036 case TARGET_EXPR:
2037 case COND_EXPR:
2038 case BIND_EXPR:
2039 case MIN_EXPR:
2040 case MAX_EXPR:
2041 case RTL_EXPR:
2042 break;
2044 default:
2045 /* Assume the worst for front-end tree codes. */
2046 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2047 break;
2048 return x;
2050 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2053 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2054 Zero means allow extended lvalues. */
2056 int pedantic_lvalues;
2058 /* When pedantic, return an expr equal to X but certainly not valid as a
2059 pedantic lvalue. Otherwise, return X. */
2061 tree
2062 pedantic_non_lvalue (tree x)
2064 if (pedantic_lvalues)
2065 return non_lvalue (x);
2066 else
2067 return x;
2070 /* Given a tree comparison code, return the code that is the logical inverse
2071 of the given code. It is not safe to do this for floating-point
2072 comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS
2073 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2075 static enum tree_code
2076 invert_tree_comparison (enum tree_code code, bool honor_nans)
2078 if (honor_nans && flag_trapping_math)
2079 return ERROR_MARK;
2081 switch (code)
2083 case EQ_EXPR:
2084 return NE_EXPR;
2085 case NE_EXPR:
2086 return EQ_EXPR;
2087 case GT_EXPR:
2088 return honor_nans ? UNLE_EXPR : LE_EXPR;
2089 case GE_EXPR:
2090 return honor_nans ? UNLT_EXPR : LT_EXPR;
2091 case LT_EXPR:
2092 return honor_nans ? UNGE_EXPR : GE_EXPR;
2093 case LE_EXPR:
2094 return honor_nans ? UNGT_EXPR : GT_EXPR;
2095 case LTGT_EXPR:
2096 return UNEQ_EXPR;
2097 case UNEQ_EXPR:
2098 return LTGT_EXPR;
2099 case UNGT_EXPR:
2100 return LE_EXPR;
2101 case UNGE_EXPR:
2102 return LT_EXPR;
2103 case UNLT_EXPR:
2104 return GE_EXPR;
2105 case UNLE_EXPR:
2106 return GT_EXPR;
2107 case ORDERED_EXPR:
2108 return UNORDERED_EXPR;
2109 case UNORDERED_EXPR:
2110 return ORDERED_EXPR;
2111 default:
2112 abort ();
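/* Illustrative sketch (exposition only, not used by the compiler):
   with NaNs honored, the inverse of GT_EXPR must be UNLE_EXPR, since
   !(x > y) is also true when x and y are unordered.  In plain C:  */
static int ATTRIBUTE_UNUSED
example_invert_gt (double x, double y)
{
  /* Both sides are false exactly when x > y, including NaN operands.  */
  return !(x > y) == !__builtin_isgreater (x, y);
}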
2116 /* Similar, but return the comparison that results if the operands are
2117 swapped. This is safe for floating-point. */
2119 enum tree_code
2120 swap_tree_comparison (enum tree_code code)
2122 switch (code)
2124 case EQ_EXPR:
2125 case NE_EXPR:
2126 return code;
2127 case GT_EXPR:
2128 return LT_EXPR;
2129 case GE_EXPR:
2130 return LE_EXPR;
2131 case LT_EXPR:
2132 return GT_EXPR;
2133 case LE_EXPR:
2134 return GE_EXPR;
2135 default:
2136 abort ();
2141 /* Convert a comparison tree code from an enum tree_code representation
2142 into a compcode bit-based encoding. This function is the inverse of
2143 compcode_to_comparison. */
2145 static enum comparison_code
2146 comparison_to_compcode (enum tree_code code)
2148 switch (code)
2150 case LT_EXPR:
2151 return COMPCODE_LT;
2152 case EQ_EXPR:
2153 return COMPCODE_EQ;
2154 case LE_EXPR:
2155 return COMPCODE_LE;
2156 case GT_EXPR:
2157 return COMPCODE_GT;
2158 case NE_EXPR:
2159 return COMPCODE_NE;
2160 case GE_EXPR:
2161 return COMPCODE_GE;
2162 case ORDERED_EXPR:
2163 return COMPCODE_ORD;
2164 case UNORDERED_EXPR:
2165 return COMPCODE_UNORD;
2166 case UNLT_EXPR:
2167 return COMPCODE_UNLT;
2168 case UNEQ_EXPR:
2169 return COMPCODE_UNEQ;
2170 case UNLE_EXPR:
2171 return COMPCODE_UNLE;
2172 case UNGT_EXPR:
2173 return COMPCODE_UNGT;
2174 case LTGT_EXPR:
2175 return COMPCODE_LTGT;
2176 case UNGE_EXPR:
2177 return COMPCODE_UNGE;
2178 default:
2179 abort ();
2183 /* Convert a compcode bit-based encoding of a comparison operator back
2184 to GCC's enum tree_code representation. This function is the
2185 inverse of comparison_to_compcode. */
2187 static enum tree_code
2188 compcode_to_comparison (enum comparison_code code)
2190 switch (code)
2192 case COMPCODE_LT:
2193 return LT_EXPR;
2194 case COMPCODE_EQ:
2195 return EQ_EXPR;
2196 case COMPCODE_LE:
2197 return LE_EXPR;
2198 case COMPCODE_GT:
2199 return GT_EXPR;
2200 case COMPCODE_NE:
2201 return NE_EXPR;
2202 case COMPCODE_GE:
2203 return GE_EXPR;
2204 case COMPCODE_ORD:
2205 return ORDERED_EXPR;
2206 case COMPCODE_UNORD:
2207 return UNORDERED_EXPR;
2208 case COMPCODE_UNLT:
2209 return UNLT_EXPR;
2210 case COMPCODE_UNEQ:
2211 return UNEQ_EXPR;
2212 case COMPCODE_UNLE:
2213 return UNLE_EXPR;
2214 case COMPCODE_UNGT:
2215 return UNGT_EXPR;
2216 case COMPCODE_LTGT:
2217 return LTGT_EXPR;
2218 case COMPCODE_UNGE:
2219 return UNGE_EXPR;
2220 default:
2221 abort ();
2225 /* Return a tree for the comparison which is the combination of
2226 doing the AND or OR (depending on CODE) of the two operations LCODE
2227 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2228 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2229 if this makes the transformation invalid. */
2231 tree
2232 combine_comparisons (enum tree_code code, enum tree_code lcode,
2233 enum tree_code rcode, tree truth_type,
2234 tree ll_arg, tree lr_arg)
2236 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2237 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2238 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2239 enum comparison_code compcode;
2241 switch (code)
2243 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2244 compcode = lcompcode & rcompcode;
2245 break;
2247 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2248 compcode = lcompcode | rcompcode;
2249 break;
2251 default:
2252 return NULL_TREE;
2255 if (!honor_nans)
2257 /* Eliminate unordered comparisons, as well as LTGT and ORD
2258 which are not used unless the mode has NaNs. */
2259 compcode &= ~COMPCODE_UNORD;
2260 if (compcode == COMPCODE_LTGT)
2261 compcode = COMPCODE_NE;
2262 else if (compcode == COMPCODE_ORD)
2263 compcode = COMPCODE_TRUE;
2265 else if (flag_trapping_math)
2267 /* Check that the original operation and the optimized ones will trap
2268 under the same condition. */
2269 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2270 && (lcompcode != COMPCODE_EQ)
2271 && (lcompcode != COMPCODE_ORD);
2272 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2273 && (rcompcode != COMPCODE_EQ)
2274 && (rcompcode != COMPCODE_ORD);
2275 bool trap = (compcode & COMPCODE_UNORD) == 0
2276 && (compcode != COMPCODE_EQ)
2277 && (compcode != COMPCODE_ORD);
2279 /* In a short-circuited boolean expression the LHS might be
2280 such that the RHS, if evaluated, will never trap. For
2281 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2282 if neither x nor y is NaN. (This is a mixed blessing: for
2283 example, the expression above will never trap, hence
2284 optimizing it to x < y would be invalid). */
2285 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2286 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2287 rtrap = false;
2289 /* If the comparison was short-circuited, and only the RHS
2290 trapped, we may now generate a spurious trap. */
2291 if (rtrap && !ltrap
2292 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2293 return NULL_TREE;
2295 /* If we changed the conditions that cause a trap, we lose. */
2296 if ((ltrap || rtrap) != trap)
2297 return NULL_TREE;
2300 if (compcode == COMPCODE_TRUE)
2301 return constant_boolean_node (true, truth_type);
2302 else if (compcode == COMPCODE_FALSE)
2303 return constant_boolean_node (false, truth_type);
2304 else
2305 return fold (build2 (compcode_to_comparison (compcode),
2306 truth_type, ll_arg, lr_arg));
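/* Worked example (illustrative only): with the bit encoding above,
   combining two comparisons is a single AND or OR on their codes.
   COMPCODE_LT | COMPCODE_EQ is 1 | 2 == 3 == COMPCODE_LE, matching
   (x < y) || (x == y) == (x <= y); COMPCODE_LE & COMPCODE_GE is
   3 & 6 == 2 == COMPCODE_EQ, matching (x <= y) && (x >= y) == (x == y).  */
static int ATTRIBUTE_UNUSED
example_compcode_combine (void)
{
  return (COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE
	 && (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ;
}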
2309 /* Return nonzero if CODE is a tree code that represents a truth value. */
2311 static int
2312 truth_value_p (enum tree_code code)
2314 return (TREE_CODE_CLASS (code) == '<'
2315 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2316 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2317 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2320 /* Return nonzero if two operands (typically of the same tree node)
2321 are necessarily equal. If either argument has side-effects this
2322 function returns zero. FLAGS modifies behavior as follows:
2324 If OEP_ONLY_CONST is set, only return nonzero for constants.
2325 This function tests whether the operands are indistinguishable;
2326 it does not test whether they are equal using C's == operation.
2327 The distinction is important for IEEE floating point, because
2328 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2329 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2331 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2332 even though it may hold multiple values during a function.
2333 This is because a GCC tree node guarantees that nothing else is
2334 executed between the evaluation of its "operands" (which may often
2335 be evaluated in arbitrary order). Hence if the operands themselves
2336 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2337 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2338 unset means assuming isochronic (or instantaneous) tree equivalence.
2339 Unless comparing arbitrary expression trees, such as from different
2340 statements, this flag can usually be left unset.
2342 If OEP_PURE_SAME is set, then pure functions with identical arguments
2343 are considered the same. It is used when the caller has other ways
2344 to ensure that global memory is unchanged in between. */
2346 int
2347 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2349 /* If either is ERROR_MARK, they aren't equal. */
2350 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2351 return 0;
2353 /* If both types don't have the same signedness, then we can't consider
2354 them equal. We must check this before the STRIP_NOPS calls
2355 because they may change the signedness of the arguments. */
2356 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2357 return 0;
2359 STRIP_NOPS (arg0);
2360 STRIP_NOPS (arg1);
2362 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2363 /* This is needed for conversions and for COMPONENT_REF.
2364 Might as well play it safe and always test this. */
2365 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2366 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2367 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2368 return 0;
2370 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2371 We don't care about side effects in that case because the SAVE_EXPR
2372 takes care of that for us. In all other cases, two expressions are
2373 equal if they have no side effects. If we have two identical
2374 expressions with side effects that should be treated the same due
2375 to the only side effects being identical SAVE_EXPR's, that will
2376 be detected in the recursive calls below. */
2377 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2378 && (TREE_CODE (arg0) == SAVE_EXPR
2379 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2380 return 1;
2382 /* Next handle constant cases, those for which we can return 1 even
2383 if ONLY_CONST is set. */
2384 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2385 switch (TREE_CODE (arg0))
2387 case INTEGER_CST:
2388 return (! TREE_CONSTANT_OVERFLOW (arg0)
2389 && ! TREE_CONSTANT_OVERFLOW (arg1)
2390 && tree_int_cst_equal (arg0, arg1));
2392 case REAL_CST:
2393 return (! TREE_CONSTANT_OVERFLOW (arg0)
2394 && ! TREE_CONSTANT_OVERFLOW (arg1)
2395 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2396 TREE_REAL_CST (arg1)));
2398 case VECTOR_CST:
2400 tree v1, v2;
2402 if (TREE_CONSTANT_OVERFLOW (arg0)
2403 || TREE_CONSTANT_OVERFLOW (arg1))
2404 return 0;
2406 v1 = TREE_VECTOR_CST_ELTS (arg0);
2407 v2 = TREE_VECTOR_CST_ELTS (arg1);
2408 while (v1 && v2)
2410 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2411 flags))
2412 return 0;
2413 v1 = TREE_CHAIN (v1);
2414 v2 = TREE_CHAIN (v2);
2417 return 1;
2420 case COMPLEX_CST:
2421 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2422 flags)
2423 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2424 flags));
2426 case STRING_CST:
2427 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2428 && ! memcmp (TREE_STRING_POINTER (arg0),
2429 TREE_STRING_POINTER (arg1),
2430 TREE_STRING_LENGTH (arg0)));
2432 case ADDR_EXPR:
2433 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2434 0);
2435 default:
2436 break;
2439 if (flags & OEP_ONLY_CONST)
2440 return 0;
2442 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2444 case '1':
2445 /* Two conversions are equal only if signedness and modes match. */
2446 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2447 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2448 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2449 return 0;
2451 return operand_equal_p (TREE_OPERAND (arg0, 0),
2452 TREE_OPERAND (arg1, 0), flags);
2454 case '<':
2455 case '2':
2456 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2457 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2458 0))
2459 return 1;
2461 /* For commutative ops, allow the other order. */
2462 return (commutative_tree_code (TREE_CODE (arg0))
2463 && operand_equal_p (TREE_OPERAND (arg0, 0),
2464 TREE_OPERAND (arg1, 1), flags)
2465 && operand_equal_p (TREE_OPERAND (arg0, 1),
2466 TREE_OPERAND (arg1, 0), flags));
2468 case 'r':
2469 /* If either of the pointer (or reference) expressions we are
2470 dereferencing contain a side effect, these cannot be equal. */
2471 if (TREE_SIDE_EFFECTS (arg0)
2472 || TREE_SIDE_EFFECTS (arg1))
2473 return 0;
2475 switch (TREE_CODE (arg0))
2477 case INDIRECT_REF:
2478 return operand_equal_p (TREE_OPERAND (arg0, 0),
2479 TREE_OPERAND (arg1, 0), flags);
2481 case COMPONENT_REF:
2482 case ARRAY_REF:
2483 case ARRAY_RANGE_REF:
2484 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2485 TREE_OPERAND (arg1, 0), flags)
2486 && operand_equal_p (TREE_OPERAND (arg0, 1),
2487 TREE_OPERAND (arg1, 1), flags));
2489 case BIT_FIELD_REF:
2490 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2491 TREE_OPERAND (arg1, 0), flags)
2492 && operand_equal_p (TREE_OPERAND (arg0, 1),
2493 TREE_OPERAND (arg1, 1), flags)
2494 && operand_equal_p (TREE_OPERAND (arg0, 2),
2495 TREE_OPERAND (arg1, 2), flags));
2496 default:
2497 return 0;
2500 case 'e':
2501 switch (TREE_CODE (arg0))
2503 case ADDR_EXPR:
2504 case TRUTH_NOT_EXPR:
2505 return operand_equal_p (TREE_OPERAND (arg0, 0),
2506 TREE_OPERAND (arg1, 0), flags);
2508 case RTL_EXPR:
2509 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2511 case CALL_EXPR:
2512 /* If the CALL_EXPRs call different functions, then they
2513 clearly can not be equal. */
2514 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2515 TREE_OPERAND (arg1, 0), flags))
2516 return 0;
2519 unsigned int cef = call_expr_flags (arg0);
2520 if (flags & OEP_PURE_SAME)
2521 cef &= ECF_CONST | ECF_PURE;
2522 else
2523 cef &= ECF_CONST;
2524 if (!cef)
2525 return 0;
2528 /* Now see if all the arguments are the same. operand_equal_p
2529 does not handle TREE_LIST, so we walk the operands here
2530 feeding them to operand_equal_p. */
2531 arg0 = TREE_OPERAND (arg0, 1);
2532 arg1 = TREE_OPERAND (arg1, 1);
2533 while (arg0 && arg1)
2535 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2536 flags))
2537 return 0;
2539 arg0 = TREE_CHAIN (arg0);
2540 arg1 = TREE_CHAIN (arg1);
2543 /* If we get here and both argument lists are exhausted
2544 then the CALL_EXPRs are equal. */
2545 return ! (arg0 || arg1);
2547 default:
2548 return 0;
2551 case 'd':
2552 /* Consider __builtin_sqrt equal to sqrt. */
2553 return (TREE_CODE (arg0) == FUNCTION_DECL
2554 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2555 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2556 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2558 default:
2559 return 0;
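/* Illustrative sketch (hypothetical helper, not referenced elsewhere):
   0.0 and -0.0 compare equal with C's ==, yet their representations
   differ, which is why REAL_CSTs are compared above with
   REAL_VALUES_IDENTICAL rather than with ==.  */
static int ATTRIBUTE_UNUSED
example_zero_signs_differ (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz && memcmp (&pz, &nz, sizeof pz) != 0;
}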
2563 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2564 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2566 When in doubt, return 0. */
2568 static int
2569 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2571 int unsignedp1, unsignedpo;
2572 tree primarg0, primarg1, primother;
2573 unsigned int correct_width;
2575 if (operand_equal_p (arg0, arg1, 0))
2576 return 1;
2578 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2579 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2580 return 0;
2582 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2583 and see if the inner values are the same. This removes any
2584 signedness comparison, which doesn't matter here. */
2585 primarg0 = arg0, primarg1 = arg1;
2586 STRIP_NOPS (primarg0);
2587 STRIP_NOPS (primarg1);
2588 if (operand_equal_p (primarg0, primarg1, 0))
2589 return 1;
2591 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2592 actual comparison operand, ARG0.
2594 First throw away any conversions to wider types
2595 already present in the operands. */
2597 primarg1 = get_narrower (arg1, &unsignedp1);
2598 primother = get_narrower (other, &unsignedpo);
2600 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2601 if (unsignedp1 == unsignedpo
2602 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2603 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2605 tree type = TREE_TYPE (arg0);
2607 /* Make sure shorter operand is extended the right way
2608 to match the longer operand. */
2609 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2610 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2612 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2613 return 1;
2616 return 0;
2619 /* See if ARG is an expression that is either a comparison or is performing
2620 arithmetic on comparisons. The comparisons must only be comparing
2621 two different values, which will be stored in *CVAL1 and *CVAL2; if
2622 they are nonzero it means that some operands have already been found.
2623 No variables may be used anywhere else in the expression except in the
2624 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2625 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2627 If this is true, return 1. Otherwise, return zero. */
2629 static int
2630 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2632 enum tree_code code = TREE_CODE (arg);
2633 char class = TREE_CODE_CLASS (code);
2635 /* We can handle some of the 'e' cases here. */
2636 if (class == 'e' && code == TRUTH_NOT_EXPR)
2637 class = '1';
2638 else if (class == 'e'
2639 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2640 || code == COMPOUND_EXPR))
2641 class = '2';
2643 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2644 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2646 /* If we've already found a CVAL1 or CVAL2, this expression is
2647 too complex to handle. */
2648 if (*cval1 || *cval2)
2649 return 0;
2651 class = '1';
2652 *save_p = 1;
2655 switch (class)
2657 case '1':
2658 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2660 case '2':
2661 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2662 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2663 cval1, cval2, save_p));
2665 case 'c':
2666 return 1;
2668 case 'e':
2669 if (code == COND_EXPR)
2670 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2671 cval1, cval2, save_p)
2672 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2673 cval1, cval2, save_p)
2674 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2675 cval1, cval2, save_p));
2676 return 0;
2678 case '<':
2679 /* First see if we can handle the first operand, then the second. For
2680 the second operand, we know *CVAL1 can't be zero. It must be that
2681 one side of the comparison is each of the values; test for the
2682 case where this isn't true by failing if the two operands
2683 are the same. */
2685 if (operand_equal_p (TREE_OPERAND (arg, 0),
2686 TREE_OPERAND (arg, 1), 0))
2687 return 0;
2689 if (*cval1 == 0)
2690 *cval1 = TREE_OPERAND (arg, 0);
2691 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2692 ;
2693 else if (*cval2 == 0)
2694 *cval2 = TREE_OPERAND (arg, 0);
2695 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2696 ;
2697 else
2698 return 0;
2700 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2701 ;
2702 else if (*cval2 == 0)
2703 *cval2 = TREE_OPERAND (arg, 1);
2704 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2705 ;
2706 else
2707 return 0;
2709 return 1;
2711 default:
2712 return 0;
2716 /* ARG is a tree that is known to contain just arithmetic operations and
2717 comparisons. Evaluate the operations in the tree substituting NEW0 for
2718 any occurrence of OLD0 as an operand of a comparison and likewise for
2719 NEW1 and OLD1. */
2721 static tree
2722 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2724 tree type = TREE_TYPE (arg);
2725 enum tree_code code = TREE_CODE (arg);
2726 char class = TREE_CODE_CLASS (code);
2728 /* We can handle some of the 'e' cases here. */
2729 if (class == 'e' && code == TRUTH_NOT_EXPR)
2730 class = '1';
2731 else if (class == 'e'
2732 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2733 class = '2';
2735 switch (class)
2737 case '1':
2738 return fold (build1 (code, type,
2739 eval_subst (TREE_OPERAND (arg, 0),
2740 old0, new0, old1, new1)));
2742 case '2':
2743 return fold (build2 (code, type,
2744 eval_subst (TREE_OPERAND (arg, 0),
2745 old0, new0, old1, new1),
2746 eval_subst (TREE_OPERAND (arg, 1),
2747 old0, new0, old1, new1)));
2749 case 'e':
2750 switch (code)
2752 case SAVE_EXPR:
2753 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2755 case COMPOUND_EXPR:
2756 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2758 case COND_EXPR:
2759 return fold (build3 (code, type,
2760 eval_subst (TREE_OPERAND (arg, 0),
2761 old0, new0, old1, new1),
2762 eval_subst (TREE_OPERAND (arg, 1),
2763 old0, new0, old1, new1),
2764 eval_subst (TREE_OPERAND (arg, 2),
2765 old0, new0, old1, new1)));
2766 default:
2767 break;
2769 /* Fall through - ??? */
2771 case '<':
2773 tree arg0 = TREE_OPERAND (arg, 0);
2774 tree arg1 = TREE_OPERAND (arg, 1);
2776 /* We need to check both for exact equality and tree equality. The
2777 former will be true if the operand has a side-effect. In that
2778 case, we know the operand occurred exactly once. */
2780 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2781 arg0 = new0;
2782 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2783 arg0 = new1;
2785 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2786 arg1 = new0;
2787 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2788 arg1 = new1;
2790 return fold (build2 (code, type, arg0, arg1));
2793 default:
2794 return arg;
2798 /* Return a tree for the case when the result of an expression is RESULT
2799 converted to TYPE and OMITTED was previously an operand of the expression
2800 but is now not needed (e.g., we folded OMITTED * 0).
2802 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2803 the conversion of RESULT to TYPE. */
2805 tree
2806 omit_one_operand (tree type, tree result, tree omitted)
2808 tree t = fold_convert (type, result);
2810 if (TREE_SIDE_EFFECTS (omitted))
2811 return build2 (COMPOUND_EXPR, type, omitted, t);
2813 return non_lvalue (t);
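/* Usage sketch (hypothetical caller): when folding CALL * 0 to zero,
   CALL must still be evaluated if it has side effects, so the result
   is either plain 0 or the COMPOUND_EXPR (CALL, 0).  */
static tree ATTRIBUTE_UNUSED
example_omit_call (tree type, tree call)
{
  return omit_one_operand (type, fold_convert (type, integer_zero_node),
			   call);
}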
2816 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2818 static tree
2819 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2821 tree t = fold_convert (type, result);
2823 if (TREE_SIDE_EFFECTS (omitted))
2824 return build2 (COMPOUND_EXPR, type, omitted, t);
2826 return pedantic_non_lvalue (t);
2829 /* Return a tree for the case when the result of an expression is RESULT
2830 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2831 of the expression but are now not needed.
2833 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2834 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2835 evaluated before OMITTED2. Otherwise, if neither has side effects,
2836 just do the conversion of RESULT to TYPE. */
2838 tree
2839 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2841 tree t = fold_convert (type, result);
2843 if (TREE_SIDE_EFFECTS (omitted2))
2844 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2845 if (TREE_SIDE_EFFECTS (omitted1))
2846 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2848 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2852 /* Return a simplified tree node for the truth-negation of ARG. This
2853 never alters ARG itself. We assume that ARG is an operation that
2854 returns a truth value (0 or 1).
2856 FIXME: one would think we would fold the result, but it causes
2857 problems with the dominator optimizer. */
2858 tree
2859 invert_truthvalue (tree arg)
2861 tree type = TREE_TYPE (arg);
2862 enum tree_code code = TREE_CODE (arg);
2864 if (code == ERROR_MARK)
2865 return arg;
2867 /* If this is a comparison, we can simply invert it, except for
2868 floating-point non-equality comparisons, in which case we just
2869 enclose a TRUTH_NOT_EXPR around what we have. */
2871 if (TREE_CODE_CLASS (code) == '<')
2873 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2874 if (FLOAT_TYPE_P (op_type)
2875 && flag_trapping_math
2876 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2877 && code != NE_EXPR && code != EQ_EXPR)
2878 return build1 (TRUTH_NOT_EXPR, type, arg);
2879 else
2881 code = invert_tree_comparison (code,
2882 HONOR_NANS (TYPE_MODE (op_type)));
2883 if (code == ERROR_MARK)
2884 return build1 (TRUTH_NOT_EXPR, type, arg);
2885 else
2886 return build2 (code, type,
2887 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2891 switch (code)
2893 case INTEGER_CST:
2894 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2896 case TRUTH_AND_EXPR:
2897 return build2 (TRUTH_OR_EXPR, type,
2898 invert_truthvalue (TREE_OPERAND (arg, 0)),
2899 invert_truthvalue (TREE_OPERAND (arg, 1)));
2901 case TRUTH_OR_EXPR:
2902 return build2 (TRUTH_AND_EXPR, type,
2903 invert_truthvalue (TREE_OPERAND (arg, 0)),
2904 invert_truthvalue (TREE_OPERAND (arg, 1)));
2906 case TRUTH_XOR_EXPR:
2907 /* Here we can invert either operand. We invert the first operand
2908 unless the second operand is a TRUTH_NOT_EXPR in which case our
2909 result is the XOR of the first operand with the inside of the
2910 negation of the second operand. */
2912 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2913 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2914 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2915 else
2916 return build2 (TRUTH_XOR_EXPR, type,
2917 invert_truthvalue (TREE_OPERAND (arg, 0)),
2918 TREE_OPERAND (arg, 1));
2920 case TRUTH_ANDIF_EXPR:
2921 return build2 (TRUTH_ORIF_EXPR, type,
2922 invert_truthvalue (TREE_OPERAND (arg, 0)),
2923 invert_truthvalue (TREE_OPERAND (arg, 1)));
2925 case TRUTH_ORIF_EXPR:
2926 return build2 (TRUTH_ANDIF_EXPR, type,
2927 invert_truthvalue (TREE_OPERAND (arg, 0)),
2928 invert_truthvalue (TREE_OPERAND (arg, 1)));
2930 case TRUTH_NOT_EXPR:
2931 return TREE_OPERAND (arg, 0);
2933 case COND_EXPR:
2934 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2935 invert_truthvalue (TREE_OPERAND (arg, 1)),
2936 invert_truthvalue (TREE_OPERAND (arg, 2)));
2938 case COMPOUND_EXPR:
2939 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2940 invert_truthvalue (TREE_OPERAND (arg, 1)));
2942 case NON_LVALUE_EXPR:
2943 return invert_truthvalue (TREE_OPERAND (arg, 0));
2945 case NOP_EXPR:
2946 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2947 break;
2949 case CONVERT_EXPR:
2950 case FLOAT_EXPR:
2951 return build1 (TREE_CODE (arg), type,
2952 invert_truthvalue (TREE_OPERAND (arg, 0)));
2954 case BIT_AND_EXPR:
2955 if (!integer_onep (TREE_OPERAND (arg, 1)))
2956 break;
2957 return build2 (EQ_EXPR, type, arg,
2958 fold_convert (type, integer_zero_node));
2960 case SAVE_EXPR:
2961 return build1 (TRUTH_NOT_EXPR, type, arg);
2963 case CLEANUP_POINT_EXPR:
2964 return build1 (CLEANUP_POINT_EXPR, type,
2965 invert_truthvalue (TREE_OPERAND (arg, 0)));
2967 default:
2968 break;
2970 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2971 abort ();
2972 return build1 (TRUTH_NOT_EXPR, type, arg);
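/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws; a plain C check of the identities (illustrative only):  */
static int ATTRIBUTE_UNUSED
example_de_morgan (int a, int b)
{
  return (!(a && b) == (!a || !b)) && (!(a || b) == (!a && !b));
}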
2975 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2976 operands are another bit-wise operation with a common input. If so,
2977 distribute the bit operations to save an operation and possibly two if
2978 constants are involved. For example, convert
2979 (A | B) & (A | C) into A | (B & C)
2980 Further simplification will occur if B and C are constants.
2982 If this optimization cannot be done, 0 will be returned. */
2984 static tree
2985 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2987 tree common;
2988 tree left, right;
2990 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2991 || TREE_CODE (arg0) == code
2992 || (TREE_CODE (arg0) != BIT_AND_EXPR
2993 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2994 return 0;
2996 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2998 common = TREE_OPERAND (arg0, 0);
2999 left = TREE_OPERAND (arg0, 1);
3000 right = TREE_OPERAND (arg1, 1);
3002 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3004 common = TREE_OPERAND (arg0, 0);
3005 left = TREE_OPERAND (arg0, 1);
3006 right = TREE_OPERAND (arg1, 0);
3008 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3010 common = TREE_OPERAND (arg0, 1);
3011 left = TREE_OPERAND (arg0, 0);
3012 right = TREE_OPERAND (arg1, 1);
3014 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3016 common = TREE_OPERAND (arg0, 1);
3017 left = TREE_OPERAND (arg0, 0);
3018 right = TREE_OPERAND (arg1, 0);
3020 else
3021 return 0;
3023 return fold (build2 (TREE_CODE (arg0), type, common,
3024 fold (build2 (code, type, left, right))));
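/* The bitwise identity behind the transformation, checked in plain C
   (illustrative only); the dual form with AND and OR swapped holds as
   well.  */
static int ATTRIBUTE_UNUSED
example_distribute_identity (unsigned a, unsigned b, unsigned c)
{
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));
}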
3027 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3028 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3030 static tree
3031 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3032 int unsignedp)
3034 tree result = build3 (BIT_FIELD_REF, type, inner,
3035 size_int (bitsize), bitsize_int (bitpos));
3037 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3039 return result;
3042 /* Optimize a bit-field compare.
3044 There are two cases: First is a compare against a constant and the
3045 second is a comparison of two items where the fields are at the same
3046 bit position relative to the start of a chunk (byte, halfword, word)
3047 large enough to contain it. In these cases we can avoid the shift
3048 implicit in bitfield extractions.
3050 For constants, we emit a compare of the shifted constant with the
3051 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3052 compared. For two fields at the same position, we do the ANDs with the
3053 similar mask and compare the result of the ANDs.
3055 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3056 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3057 are the left and right operands of the comparison, respectively.
3059 If the optimization described above can be done, we return the resulting
3060 tree. Otherwise we return zero. */
3062 static tree
3063 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3064 tree lhs, tree rhs)
3066 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3067 tree type = TREE_TYPE (lhs);
3068 tree signed_type, unsigned_type;
3069 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3070 enum machine_mode lmode, rmode, nmode;
3071 int lunsignedp, runsignedp;
3072 int lvolatilep = 0, rvolatilep = 0;
3073 tree linner, rinner = NULL_TREE;
3074 tree mask;
3075 tree offset;
3077 /* Get all the information about the extractions being done. If the bit size
3078 is the same as the size of the underlying object, we aren't doing an
3079 extraction at all and so can do nothing. We also don't want to
3080 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3081 then will no longer be able to replace it. */
3082 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3083 &lunsignedp, &lvolatilep);
3084 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3085 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3086 return 0;
3088 if (!const_p)
3090 /* If this is not a constant, we can only do something if bit positions,
3091 sizes, and signedness are the same. */
3092 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3093 &runsignedp, &rvolatilep);
3095 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3096 || lunsignedp != runsignedp || offset != 0
3097 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3098 return 0;
3101 /* See if we can find a mode to refer to this field. We should be able to,
3102 but fail if we can't. */
3103 nmode = get_best_mode (lbitsize, lbitpos,
3104 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3105 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3106 TYPE_ALIGN (TREE_TYPE (rinner))),
3107 word_mode, lvolatilep || rvolatilep);
3108 if (nmode == VOIDmode)
3109 return 0;
3111 /* Set signed and unsigned types of the precision of this mode for the
3112 shifts below. */
3113 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3114 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3116 /* Compute the bit position and size for the new reference and our offset
3117 within it. If the new reference is the same size as the original, we
3118 won't optimize anything, so return zero. */
3119 nbitsize = GET_MODE_BITSIZE (nmode);
3120 nbitpos = lbitpos & ~ (nbitsize - 1);
3121 lbitpos -= nbitpos;
3122 if (nbitsize == lbitsize)
3123 return 0;
3125 if (BYTES_BIG_ENDIAN)
3126 lbitpos = nbitsize - lbitsize - lbitpos;
3128 /* Make the mask to be used against the extracted field. */
3129 mask = build_int_2 (~0, ~0);
3130 TREE_TYPE (mask) = unsigned_type;
3131 force_fit_type (mask, 0);
3132 mask = fold_convert (unsigned_type, mask);
3133 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3134 mask = const_binop (RSHIFT_EXPR, mask,
3135 size_int (nbitsize - lbitsize - lbitpos), 0);
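  /* Worked example (illustrative numbers only): with nbitsize == 32,
     lbitsize == 5 and lbitpos == 8, the all-ones value is first shifted
     left by 27, giving 0xf8000000, then shifted right (logically, since
     the type is unsigned) by 19, leaving mask == 0x00001f00: five one
     bits starting at bit 8.  */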
3137 if (! const_p)
3138 /* If not comparing with constant, just rework the comparison
3139 and return. */
3140 return build2 (code, compare_type,
3141 build2 (BIT_AND_EXPR, unsigned_type,
3142 make_bit_field_ref (linner, unsigned_type,
3143 nbitsize, nbitpos, 1),
3144 mask),
3145 build2 (BIT_AND_EXPR, unsigned_type,
3146 make_bit_field_ref (rinner, unsigned_type,
3147 nbitsize, nbitpos, 1),
3148 mask));
3150 /* Otherwise, we are handling the constant case. See if the constant is too
3151 big for the field. Warn and return a tree for 0 (false) if so. We do
3152 this not only for its own sake, but to avoid having to test for this
3153 error case below. If we didn't, we might generate wrong code.
3155 For unsigned fields, the constant shifted right by the field length should
3156 be all zero. For signed fields, the high-order bits should agree with
3157 the sign bit. */
3159 if (lunsignedp)
3161 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3162 fold_convert (unsigned_type, rhs),
3163 size_int (lbitsize), 0)))
3165 warning ("comparison is always %d due to width of bit-field",
3166 code == NE_EXPR);
3167 return constant_boolean_node (code == NE_EXPR, compare_type);
3170 else
3172 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3173 size_int (lbitsize - 1), 0);
3174 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3176 warning ("comparison is always %d due to width of bit-field",
3177 code == NE_EXPR);
3178 return constant_boolean_node (code == NE_EXPR, compare_type);
3182 /* Single-bit compares should always be against zero. */
3183 if (lbitsize == 1 && ! integer_zerop (rhs))
3185 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3186 rhs = fold_convert (type, integer_zero_node);
3189 /* Make a new bitfield reference, shift the constant over the
3190 appropriate number of bits and mask it with the computed mask
3191 (in case this was a signed field). If we changed it, make a new one. */
3192 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3193 if (lvolatilep)
3195 TREE_SIDE_EFFECTS (lhs) = 1;
3196 TREE_THIS_VOLATILE (lhs) = 1;
3199 rhs = fold (const_binop (BIT_AND_EXPR,
3200 const_binop (LSHIFT_EXPR,
3201 fold_convert (unsigned_type, rhs),
3202 size_int (lbitpos), 0),
3203 mask, 0));
3205 return build2 (code, compare_type,
3206 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3207 rhs);
3210 /* Subroutine for fold_truthop: decode a field reference.
3212 If EXP is a comparison reference, we return the innermost reference.
3214 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3215 set to the starting bit number.
3217 If the innermost field can be completely contained in a mode-sized
3218 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3220 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3221 otherwise it is not changed.
3223 *PUNSIGNEDP is set to the signedness of the field.
3225 *PMASK is set to the mask used. This is either contained in a
3226 BIT_AND_EXPR or derived from the width of the field.
3228 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3230 Return 0 if this is not a component reference or is one that we can't
3231 do anything with. */
3233 static tree
3234 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3235 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3236 int *punsignedp, int *pvolatilep,
3237 tree *pmask, tree *pand_mask)
3239 tree outer_type = 0;
3240 tree and_mask = 0;
3241 tree mask, inner, offset;
3242 tree unsigned_type;
3243 unsigned int precision;
3245 /* All the optimizations using this function assume integer fields.
3246 There are problems with FP fields since the type_for_size call
3247 below can fail for, e.g., XFmode. */
3248 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3249 return 0;
3251 /* We are interested in the bare arrangement of bits, so strip everything
3252 that doesn't affect the machine mode. However, record the type of the
3253 outermost expression if it may matter below. */
3254 if (TREE_CODE (exp) == NOP_EXPR
3255 || TREE_CODE (exp) == CONVERT_EXPR
3256 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3257 outer_type = TREE_TYPE (exp);
3258 STRIP_NOPS (exp);
3260 if (TREE_CODE (exp) == BIT_AND_EXPR)
3262 and_mask = TREE_OPERAND (exp, 1);
3263 exp = TREE_OPERAND (exp, 0);
3264 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3265 if (TREE_CODE (and_mask) != INTEGER_CST)
3266 return 0;
3269 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3270 punsignedp, pvolatilep);
3271 if ((inner == exp && and_mask == 0)
3272 || *pbitsize < 0 || offset != 0
3273 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3274 return 0;
3276 /* If the number of bits in the reference is the same as the bitsize of
3277 the outer type, then the outer type gives the signedness. Otherwise
3278 (in case of a small bitfield) the signedness is unchanged. */
3279 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3280 *punsignedp = TYPE_UNSIGNED (outer_type);
3282 /* Compute the mask to access the bitfield. */
3283 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3284 precision = TYPE_PRECISION (unsigned_type);
3286 mask = build_int_2 (~0, ~0);
3287 TREE_TYPE (mask) = unsigned_type;
3288 force_fit_type (mask, 0);
3289 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3290 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3292 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3293 if (and_mask != 0)
3294 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3295 fold_convert (unsigned_type, and_mask), mask));
3297 *pmask = mask;
3298 *pand_mask = and_mask;
3299 return inner;
3302 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3303 bit positions. */
3305 static int
3306 all_ones_mask_p (tree mask, int size)
3308 tree type = TREE_TYPE (mask);
3309 unsigned int precision = TYPE_PRECISION (type);
3310 tree tmask;
3312 tmask = build_int_2 (~0, ~0);
3313 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3314 force_fit_type (tmask, 0);
3315 return
3316 tree_int_cst_equal (mask,
3317 const_binop (RSHIFT_EXPR,
3318 const_binop (LSHIFT_EXPR, tmask,
3319 size_int (precision - size),
3320 0),
3321 size_int (precision - size), 0));
3324 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3325 represents the sign bit of EXP's type. If EXP represents a sign
3326 or zero extension, also test VAL against the unextended type.
3327 The return value is the (sub)expression whose sign bit is VAL,
3328 or NULL_TREE otherwise. */
3330 static tree
3331 sign_bit_p (tree exp, tree val)
3333 unsigned HOST_WIDE_INT mask_lo, lo;
3334 HOST_WIDE_INT mask_hi, hi;
3335 int width;
3336 tree t;
3338 /* Tree EXP must have an integral type. */
3339 t = TREE_TYPE (exp);
3340 if (! INTEGRAL_TYPE_P (t))
3341 return NULL_TREE;
3343 /* Tree VAL must be an integer constant. */
3344 if (TREE_CODE (val) != INTEGER_CST
3345 || TREE_CONSTANT_OVERFLOW (val))
3346 return NULL_TREE;
3348 width = TYPE_PRECISION (t);
3349 if (width > HOST_BITS_PER_WIDE_INT)
3351 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3352 lo = 0;
3354 mask_hi = ((unsigned HOST_WIDE_INT) -1
3355 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3356 mask_lo = -1;
3358 else
3360 hi = 0;
3361 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3363 mask_hi = 0;
3364 mask_lo = ((unsigned HOST_WIDE_INT) -1
3365 >> (HOST_BITS_PER_WIDE_INT - width));
3368 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3369 treat VAL as if it were unsigned. */
3370 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3371 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3372 return exp;
3374 /* Handle extension from a narrower type. */
3375 if (TREE_CODE (exp) == NOP_EXPR
3376 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3377 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3379 return NULL_TREE;
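/* Illustrative sketch (hypothetical helper, assuming 8-bit chars): the
   constants computed above for a 16-bit precision narrower than the
   host word: LO holds the sign bit 1 << 15 and MASK_LO keeps only the
   low 16 bits of VAL.  */
static int ATTRIBUTE_UNUSED
example_sign_bit_16 (void)
{
  unsigned long lo = 1UL << 15;
  unsigned long mask_lo = (unsigned long) -1 >> (sizeof (long) * 8 - 16);
  return lo == 0x8000UL && mask_lo == 0xffffUL;
}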
3382 /* Subroutine for fold_truthop: determine if an operand is simple enough
3383 to be evaluated unconditionally. */
3385 static int
3386 simple_operand_p (tree exp)
3388 /* Strip any conversions that don't change the machine mode. */
3389 while ((TREE_CODE (exp) == NOP_EXPR
3390 || TREE_CODE (exp) == CONVERT_EXPR)
3391 && (TYPE_MODE (TREE_TYPE (exp))
3392 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3393 exp = TREE_OPERAND (exp, 0);
3395 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3396 || (DECL_P (exp)
3397 && ! TREE_ADDRESSABLE (exp)
3398 && ! TREE_THIS_VOLATILE (exp)
3399 && ! DECL_NONLOCAL (exp)
3400 /* Don't regard global variables as simple. They may be
3401 allocated in ways unknown to the compiler (shared memory,
3402 #pragma weak, etc). */
3403 && ! TREE_PUBLIC (exp)
3404 && ! DECL_EXTERNAL (exp)
3405 /* Loading a static variable is unduly expensive, but global
3406 registers aren't expensive. */
3407 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3410 /* The following functions are subroutines to fold_range_test and allow it to
3411 try to change a logical combination of comparisons into a range test.
3413 For example, both
3414 X == 2 || X == 3 || X == 4 || X == 5
3415 and
3416 X >= 2 && X <= 5
3417 are converted to
3418 (unsigned) (X - 2) <= 3
3420 We describe each set of comparisons as being either inside or outside
3421 a range, using a variable named like IN_P, and then describe the
3422 range with a lower and upper bound. If one of the bounds is omitted,
3423 it represents either the highest or lowest value of the type.
3425 In the comments below, we represent a range by two numbers in brackets
3426 preceded by a "+" to designate being inside that range, or a "-" to
3427 designate being outside that range, so the condition can be inverted by
3428 flipping the prefix. An omitted bound is represented by a "-". For
3429 example, "- [-, 10]" means being outside the range starting at the lowest
3430 possible value and ending at 10, in other words, being greater than 10.
3431 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3432 always false.
3434 We set up things so that the missing bounds are handled in a consistent
3435 manner so neither a missing bound nor "true" and "false" need to be
3436 handled using a special case. */
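/* The canonical example above, checked in plain C (illustrative only;
   it ignores the signed-overflow corner at INT_MIN when subtracting):
   both forms reduce to one unsigned comparison against the range
   width.  */
static int ATTRIBUTE_UNUSED
example_range_test (int x)
{
  return ((unsigned) (x - 2) <= 3) == (x >= 2 && x <= 5);
}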
3438 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3439 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3440 and UPPER1_P are nonzero if the respective argument is an upper bound
3441 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3442 must be specified for a comparison. ARG1 will be converted to ARG0's
3443 type if both are specified. */
3445 static tree
3446 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3447 tree arg1, int upper1_p)
3449 tree tem;
3450 int result;
3451 int sgn0, sgn1;
3453 /* If neither arg represents infinity, do the normal operation.
3454 Else, if not a comparison, return infinity. Else handle the special
3455 comparison rules. Note that most of the cases below won't occur, but
3456 are handled for consistency. */
3458 if (arg0 != 0 && arg1 != 0)
3460 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3461 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3462 STRIP_NOPS (tem);
3463 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3466 if (TREE_CODE_CLASS (code) != '<')
3467 return 0;
3469 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3470 for neither. In real maths, we cannot assume open ended ranges are
3471 the same. But, this is computer arithmetic, where numbers are finite.
3472 We can therefore make the transformation of any unbounded range with
3473 the value Z, Z being greater than any representable number. This permits
3474 us to treat unbounded ranges as equal. */
3475 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3476 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3477 switch (code)
3479 case EQ_EXPR:
3480 result = sgn0 == sgn1;
3481 break;
3482 case NE_EXPR:
3483 result = sgn0 != sgn1;
3484 break;
3485 case LT_EXPR:
3486 result = sgn0 < sgn1;
3487 break;
3488 case LE_EXPR:
3489 result = sgn0 <= sgn1;
3490 break;
3491 case GT_EXPR:
3492 result = sgn0 > sgn1;
3493 break;
3494 case GE_EXPR:
3495 result = sgn0 >= sgn1;
3496 break;
3497 default:
3498 abort ();
3501 return constant_boolean_node (result, type);
3504 /* Given EXP, a logical expression, set the range it is testing into
3505 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3506 actually being tested. *PLOW and *PHIGH will be made of the same type
3507 as the returned expression. If EXP is not a comparison, we will most
3508 likely not be returning a useful value and range. */
3510 static tree
3511 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3513 enum tree_code code;
3514 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3515 tree orig_type = NULL_TREE;
3516 int in_p, n_in_p;
3517 tree low, high, n_low, n_high;
3519 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3520 and see if we can refine the range. Some of the cases below may not
3521 happen, but it doesn't seem worth worrying about this. We "continue"
3522 the outer loop when we've changed something; otherwise we "break"
3523 the switch, which will "break" the while. */
3525 in_p = 0;
3526 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3528 while (1)
3530 code = TREE_CODE (exp);
3532 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3534 if (first_rtl_op (code) > 0)
3535 arg0 = TREE_OPERAND (exp, 0);
3536 if (TREE_CODE_CLASS (code) == '<'
3537 || TREE_CODE_CLASS (code) == '1'
3538 || TREE_CODE_CLASS (code) == '2')
3539 type = TREE_TYPE (arg0);
3540 if (TREE_CODE_CLASS (code) == '2'
3541 || TREE_CODE_CLASS (code) == '<'
3542 || (TREE_CODE_CLASS (code) == 'e'
3543 && TREE_CODE_LENGTH (code) > 1))
3544 arg1 = TREE_OPERAND (exp, 1);
3547 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3548 lose a cast by accident. */
3549 if (type != NULL_TREE && orig_type == NULL_TREE)
3550 orig_type = type;
3552 switch (code)
3554 case TRUTH_NOT_EXPR:
3555 in_p = ! in_p, exp = arg0;
3556 continue;
3558 case EQ_EXPR: case NE_EXPR:
3559 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3560 /* We can only do something if the range is testing for zero
3561 and if the second operand is an integer constant. Note that
3562 saying something is "in" the range we make is done by
3563 complementing IN_P, since IN_P is initially set for the case of
3564 being not equal to zero; "out" is leaving it alone. */
3565 if (low == 0 || high == 0
3566 || ! integer_zerop (low) || ! integer_zerop (high)
3567 || TREE_CODE (arg1) != INTEGER_CST)
3568 break;
3570 switch (code)
3572 case NE_EXPR: /* - [c, c] */
3573 low = high = arg1;
3574 break;
3575 case EQ_EXPR: /* + [c, c] */
3576 in_p = ! in_p, low = high = arg1;
3577 break;
3578 case GT_EXPR: /* - [-, c] */
3579 low = 0, high = arg1;
3580 break;
3581 case GE_EXPR: /* + [c, -] */
3582 in_p = ! in_p, low = arg1, high = 0;
3583 break;
3584 case LT_EXPR: /* - [c, -] */
3585 low = arg1, high = 0;
3586 break;
3587 case LE_EXPR: /* + [-, c] */
3588 in_p = ! in_p, low = 0, high = arg1;
3589 break;
3590 default:
3591 abort ();
3594 exp = arg0;
3596 /* If this is an unsigned comparison, we also know that EXP is
3597 greater than or equal to zero. We base the range tests we make
3598 on that fact, so we record it here so we can parse existing
3599 range tests. */
3600 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3602 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3603 1, fold_convert (type, integer_zero_node),
3604 NULL_TREE))
3605 break;
3607 in_p = n_in_p, low = n_low, high = n_high;
3609 /* If the high bound is missing, but we have a nonzero low
3610 bound, reverse the range so it goes from zero to the low bound
3611 minus 1. */
3612 if (high == 0 && low && ! integer_zerop (low))
3614 in_p = ! in_p;
3615 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3616 integer_one_node, 0);
3617 low = fold_convert (type, integer_zero_node);
3620 continue;
3622 case NEGATE_EXPR:
3623 /* (-x) IN [a,b] -> x in [-b, -a] */
3624 n_low = range_binop (MINUS_EXPR, type,
3625 fold_convert (type, integer_zero_node),
3626 0, high, 1);
3627 n_high = range_binop (MINUS_EXPR, type,
3628 fold_convert (type, integer_zero_node),
3629 0, low, 0);
3630 low = n_low, high = n_high;
3631 exp = arg0;
3632 continue;
3634 case BIT_NOT_EXPR:
3635 /* ~ X -> -X - 1 */
3636 exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
3637 fold_convert (type, integer_one_node));
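	/* For example (illustrative): since ~x == -x - 1, a test such
	   as ~x <= 5 becomes -x - 1 <= 5, i.e. x >= -6; the rebuilt
	   MINUS_EXPR is then handled by the cases above and below on
	   the next iteration of the loop.  */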
3638 continue;
3640 case PLUS_EXPR: case MINUS_EXPR:
3641 if (TREE_CODE (arg1) != INTEGER_CST)
3642 break;
3644 /* If EXP is signed, any overflow in the computation is undefined,
3645 so we don't worry about it so long as our computations on
3646 the bounds don't overflow. For unsigned, overflow is defined
3647 and this is exactly the right thing. */
3648 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3649 type, low, 0, arg1, 0);
3650 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3651 type, high, 1, arg1, 0);
3652 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3653 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3654 break;
3656 /* Check for an unsigned range which has wrapped around the maximum
3657 value thus making n_high < n_low, and normalize it. */
3658 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3660 low = range_binop (PLUS_EXPR, type, n_high, 0,
3661 integer_one_node, 0);
3662 high = range_binop (MINUS_EXPR, type, n_low, 0,
3663 integer_one_node, 0);
3665 /* If the range is of the form +/- [ x+1, x ], we won't
3666 be able to normalize it. But then, it represents the
3667 whole range or the empty set, so make it
3668 +/- [ -, - ]. */
3669 if (tree_int_cst_equal (n_low, low)
3670 && tree_int_cst_equal (n_high, high))
3671 low = high = 0;
3672 else
3673 in_p = ! in_p;
3675 else
3676 low = n_low, high = n_high;
3678 exp = arg0;
3679 continue;
3681 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3682 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3683 break;
3685 if (! INTEGRAL_TYPE_P (type)
3686 || (low != 0 && ! int_fits_type_p (low, type))
3687 || (high != 0 && ! int_fits_type_p (high, type)))
3688 break;
3690 n_low = low, n_high = high;
3692 if (n_low != 0)
3693 n_low = fold_convert (type, n_low);
3695 if (n_high != 0)
3696 n_high = fold_convert (type, n_high);
3698 /* If we're converting from an unsigned to a signed type,
3699 we will be doing the comparison as unsigned. The tests above
3700 have already verified that LOW and HIGH are both positive.
3702 So we have to make sure that the original unsigned value will
3703 be interpreted as positive. */
3704 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3706 tree equiv_type = lang_hooks.types.type_for_mode
3707 (TYPE_MODE (type), 1);
3708 tree high_positive;
3710 /* A range without an upper bound is, naturally, unbounded.
3711 Since convert would have cropped a very large value, use
3712 the max value for the destination type. */
3713 high_positive
3714 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3715 : TYPE_MAX_VALUE (type);
3717 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3718 high_positive = fold (build2 (RSHIFT_EXPR, type,
3719 fold_convert (type,
3720 high_positive),
3721 fold_convert (type,
3722 integer_one_node)));
3724 /* If the low bound is specified, "and" the range with the
3725 range for which the original unsigned value will be
3726 positive. */
3727 if (low != 0)
3729 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3730 1, n_low, n_high, 1,
3731 fold_convert (type, integer_zero_node),
3732 high_positive))
3733 break;
3735 in_p = (n_in_p == in_p);
3737 else
3739 /* Otherwise, "or" the range with the range of the input
3740 that will be interpreted as negative. */
3741 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3742 0, n_low, n_high, 1,
3743 fold_convert (type, integer_zero_node),
3744 high_positive))
3745 break;
3747 in_p = (in_p != n_in_p);
3751 exp = arg0;
3752 low = n_low, high = n_high;
3753 continue;
3755 default:
3756 break;
3759 break;
3762 /* If EXP is a constant, we can evaluate whether this is true or false. */
3763 if (TREE_CODE (exp) == INTEGER_CST)
3765 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3766 exp, 0, low, 0))
3767 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3768 exp, 1, high, 1)));
3769 low = high = 0;
3770 exp = 0;
3773 *pin_p = in_p, *plow = low, *phigh = high;
3774 return exp;
3777 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3778 type, TYPE, return an expression to test if EXP is in (or out of, depending
3779 on IN_P) the range. Return 0 if the test couldn't be created. */
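/* For example (an illustrative sketch, assuming EXP is a 32-bit int):
   with IN_P set, LOW = 1 and HIGH = 10, the test reduces to a single
   unsigned comparison, (unsigned int) (exp - 1) <= 9.  */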
3781 static tree
3782 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3784 tree etype = TREE_TYPE (exp);
3785 tree value;
3787 if (! in_p)
3789 value = build_range_check (type, exp, 1, low, high);
3790 if (value != 0)
3791 return invert_truthvalue (value);
3793 return 0;
3796 if (low == 0 && high == 0)
3797 return fold_convert (type, integer_one_node);
3799 if (low == 0)
3800 return fold (build2 (LE_EXPR, type, exp, high));
3802 if (high == 0)
3803 return fold (build2 (GE_EXPR, type, exp, low));
3805 if (operand_equal_p (low, high, 0))
3806 return fold (build2 (EQ_EXPR, type, exp, low));
3808 if (integer_zerop (low))
3810 if (! TYPE_UNSIGNED (etype))
3812 etype = lang_hooks.types.unsigned_type (etype);
3813 high = fold_convert (etype, high);
3814 exp = fold_convert (etype, exp);
3816 return build_range_check (type, exp, 1, 0, high);
3819 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3820 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3822 unsigned HOST_WIDE_INT lo;
3823 HOST_WIDE_INT hi;
3824 int prec;
3826 prec = TYPE_PRECISION (etype);
3827 if (prec <= HOST_BITS_PER_WIDE_INT)
3829 hi = 0;
3830 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3832 else
3834 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3835 lo = (unsigned HOST_WIDE_INT) -1;
3838 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3840 if (TYPE_UNSIGNED (etype))
3842 etype = lang_hooks.types.signed_type (etype);
3843 exp = fold_convert (etype, exp);
3845 return fold (build2 (GT_EXPR, type, exp,
3846 fold_convert (etype, integer_zero_node)));
3850 value = const_binop (MINUS_EXPR, high, low, 0);
3851 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3853 tree utype, minv, maxv;
3855 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3856 for the type in question, as we rely on this here. */
3857 switch (TREE_CODE (etype))
3859 case INTEGER_TYPE:
3860 case ENUMERAL_TYPE:
3861 case CHAR_TYPE:
3862 utype = lang_hooks.types.unsigned_type (etype);
3863 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3864 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3865 integer_one_node, 1);
3866 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3867 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3868 minv, 1, maxv, 1)))
3870 etype = utype;
3871 high = fold_convert (etype, high);
3872 low = fold_convert (etype, low);
3873 exp = fold_convert (etype, exp);
3874 value = const_binop (MINUS_EXPR, high, low, 0);
3876 break;
3877 default:
3878 break;
3882 if (value != 0 && ! TREE_OVERFLOW (value))
3883 return build_range_check (type,
3884 fold (build2 (MINUS_EXPR, etype, exp, low)),
3885 1, fold_convert (etype, integer_zero_node),
3886 value);
3888 return 0;
3891 /* Given two ranges, see if we can merge them into one. Return 1 if we
3892 can, 0 if we can't. Set the output range into the specified parameters. */
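/* For example (illustration only): merging +[2, 5] with +[4, 9] gives
   their intersection +[4, 5], while merging +[0, 9] with -[3, 5] would
   leave the two disjoint pieces [0, 2] and [6, 9], which cannot be
   expressed as a single range, so 0 is returned for that case.  */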
3894 static int
3895 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3896 tree high0, int in1_p, tree low1, tree high1)
3898 int no_overlap;
3899 int subset;
3900 int temp;
3901 tree tem;
3902 int in_p;
3903 tree low, high;
3904 int lowequal = ((low0 == 0 && low1 == 0)
3905 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3906 low0, 0, low1, 0)));
3907 int highequal = ((high0 == 0 && high1 == 0)
3908 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3909 high0, 1, high1, 1)));
3911 /* Make range 0 be the range that starts first, or ends last if they
3912 start at the same value. Swap them if it isn't. */
3913 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3914 low0, 0, low1, 0))
3915 || (lowequal
3916 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3917 high1, 1, high0, 1))))
3919 temp = in0_p, in0_p = in1_p, in1_p = temp;
3920 tem = low0, low0 = low1, low1 = tem;
3921 tem = high0, high0 = high1, high1 = tem;
3924 /* Now flag two cases, whether the ranges are disjoint or whether the
3925 second range is totally subsumed in the first. Note that the tests
3926 below are simplified by the ones above. */
3927 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3928 high0, 1, low1, 0));
3929 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3930 high1, 1, high0, 1));
3932 /* We now have four cases, depending on whether we are including or
3933 excluding the two ranges. */
3934 if (in0_p && in1_p)
3936 /* If they don't overlap, the result is false. If the second range
3937 is a subset it is the result. Otherwise, the range is from the start
3938 of the second to the end of the first. */
3939 if (no_overlap)
3940 in_p = 0, low = high = 0;
3941 else if (subset)
3942 in_p = 1, low = low1, high = high1;
3943 else
3944 in_p = 1, low = low1, high = high0;
3947 else if (in0_p && ! in1_p)
3949 /* If they don't overlap, the result is the first range. If they are
3950 equal, the result is false. If the second range is a subset of the
3951 first, and the ranges begin at the same place, we go from just after
3952 the end of the first range to the end of the second. If the second
3953 range is not a subset of the first, or if it is a subset and both
3954 ranges end at the same place, the range starts at the start of the
3955 first range and ends just before the second range.
3956 Otherwise, we can't describe this as a single range. */
3957 if (no_overlap)
3958 in_p = 1, low = low0, high = high0;
3959 else if (lowequal && highequal)
3960 in_p = 0, low = high = 0;
3961 else if (subset && lowequal)
3963 in_p = 1, high = high0;
3964 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3965 integer_one_node, 0);
3967 else if (! subset || highequal)
3969 in_p = 1, low = low0;
3970 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3971 integer_one_node, 0);
3973 else
3974 return 0;
3977 else if (! in0_p && in1_p)
3979 /* If they don't overlap, the result is the second range. If the second
3980 is a subset of the first, the result is false. Otherwise,
3981 the range starts just after the first range and ends at the
3982 end of the second. */
3983 if (no_overlap)
3984 in_p = 1, low = low1, high = high1;
3985 else if (subset || highequal)
3986 in_p = 0, low = high = 0;
3987 else
3989 in_p = 1, high = high1;
3990 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3991 integer_one_node, 0);
3995 else
3997 /* The case where we are excluding both ranges. Here the complex case
3998 is if they don't overlap. In that case, the only time we have a
3999 range is if they are adjacent. If the second is a subset of the
4000 first, the result is the first. Otherwise, the range to exclude
4001 starts at the beginning of the first range and ends at the end of the
4002 second. */
4003 if (no_overlap)
4005 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4006 range_binop (PLUS_EXPR, NULL_TREE,
4007 high0, 1,
4008 integer_one_node, 1),
4009 1, low1, 0)))
4010 in_p = 0, low = low0, high = high1;
4011 else
4013 /* Canonicalize - [min, x] into - [-, x]. */
4014 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4015 switch (TREE_CODE (TREE_TYPE (low0)))
4017 case ENUMERAL_TYPE:
4018 if (TYPE_PRECISION (TREE_TYPE (low0))
4019 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4020 break;
4021 /* FALLTHROUGH */
4022 case INTEGER_TYPE:
4023 case CHAR_TYPE:
4024 if (tree_int_cst_equal (low0,
4025 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4026 low0 = 0;
4027 break;
4028 case POINTER_TYPE:
4029 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4030 && integer_zerop (low0))
4031 low0 = 0;
4032 break;
4033 default:
4034 break;
4037 /* Canonicalize - [x, max] into - [x, -]. */
4038 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4039 switch (TREE_CODE (TREE_TYPE (high1)))
4041 case ENUMERAL_TYPE:
4042 if (TYPE_PRECISION (TREE_TYPE (high1))
4043 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4044 break;
4045 /* FALLTHROUGH */
4046 case INTEGER_TYPE:
4047 case CHAR_TYPE:
4048 if (tree_int_cst_equal (high1,
4049 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4050 high1 = 0;
4051 break;
4052 case POINTER_TYPE:
4053 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4054 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4055 high1, 1,
4056 integer_one_node, 1)))
4057 high1 = 0;
4058 break;
4059 default:
4060 break;
4063 /* The ranges might be also adjacent between the maximum and
4064 minimum values of the given type. For
4065 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4066 return + [x + 1, y - 1]. */
4067 if (low0 == 0 && high1 == 0)
4069 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4070 integer_one_node, 1);
4071 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4072 integer_one_node, 0);
4073 if (low == 0 || high == 0)
4074 return 0;
4076 in_p = 1;
4078 else
4079 return 0;
4082 else if (subset)
4083 in_p = 0, low = low0, high = high0;
4084 else
4085 in_p = 0, low = low0, high = high1;
4088 *pin_p = in_p, *plow = low, *phigh = high;
4089 return 1;
4092 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4093 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4094 #endif
4096 /* EXP is some logical combination of boolean tests. See if we can
4097 merge it into some range test. Return the new tree if so. */
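/* For example (a hypothetical 32-bit int "x"): "x >= 2 && x <= 7"
   yields two ranges that merge into +[2, 7], which build_range_check
   can then turn into the single test (unsigned int) (x - 2) <= 5.  */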
4099 static tree
4100 fold_range_test (tree exp)
4102 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4103 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4104 int in0_p, in1_p, in_p;
4105 tree low0, low1, low, high0, high1, high;
4106 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4107 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4108 tree tem;
4110 /* If this is an OR operation, invert both sides; we will invert
4111 again at the end. */
4112 if (or_op)
4113 in0_p = ! in0_p, in1_p = ! in1_p;
4115 /* If both expressions are the same, if we can merge the ranges, and we
4116 can build the range test, return it or it inverted. If one of the
4117 ranges is always true or always false, consider it to be the same
4118 expression as the other. */
4119 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4120 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4121 in1_p, low1, high1)
4122 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4123 lhs != 0 ? lhs
4124 : rhs != 0 ? rhs : integer_zero_node,
4125 in_p, low, high))))
4126 return or_op ? invert_truthvalue (tem) : tem;
4128 /* On machines where the branch cost is expensive, if this is a
4129 short-circuited branch and the underlying object on both sides
4130 is the same, make a non-short-circuit operation. */
4131 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4132 && lhs != 0 && rhs != 0
4133 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4134 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4135 && operand_equal_p (lhs, rhs, 0))
4137 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4138 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4139 which cases we can't do this. */
4140 if (simple_operand_p (lhs))
4141 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4142 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4143 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4144 TREE_OPERAND (exp, 1));
4146 else if (lang_hooks.decls.global_bindings_p () == 0
4147 && ! CONTAINS_PLACEHOLDER_P (lhs))
4149 tree common = save_expr (lhs);
4151 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4152 or_op ? ! in0_p : in0_p,
4153 low0, high0))
4154 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4155 or_op ? ! in1_p : in1_p,
4156 low1, high1))))
4157 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4158 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4159 TREE_TYPE (exp), lhs, rhs);
4163 return 0;
4166 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4167 bit value. Arrange things so the extra bits will be set to zero if and
4168 only if C is sign-extended to its full width. If MASK is nonzero,
4169 it is an INTEGER_CST that should be AND'ed with the extra bits. */
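/* For example (illustration only): with P = 8, UNSIGNEDP = 0, MASK =
   NULL and C = 0xff in a 32-bit mode, the sign bit of the 8-bit field
   is set, so the result is 0xffffffff, the value C would have after
   being sign-extended from 8 bits to the full width.  */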
4171 static tree
4172 unextend (tree c, int p, int unsignedp, tree mask)
4174 tree type = TREE_TYPE (c);
4175 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4176 tree temp;
4178 if (p == modesize || unsignedp)
4179 return c;
4181 /* We work by getting just the sign bit into the low-order bit, then
4182 into the high-order bit, then sign-extend. We then XOR that value
4183 with C. */
4184 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4185 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4187 /* We must use a signed type in order to get an arithmetic right shift.
4188 However, we must also avoid introducing accidental overflows, so that
4189 a subsequent call to integer_zerop will work. Hence we must
4190 do the type conversion here. At this point, the constant is either
4191 zero or one, and the conversion to a signed type can never overflow.
4192 We could get an overflow if this conversion is done anywhere else. */
4193 if (TYPE_UNSIGNED (type))
4194 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4196 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4197 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4198 if (mask != 0)
4199 temp = const_binop (BIT_AND_EXPR, temp,
4200 fold_convert (TREE_TYPE (c), mask), 0);
4201 /* If necessary, convert the type back to match the type of C. */
4202 if (TYPE_UNSIGNED (type))
4203 temp = fold_convert (type, temp);
4205 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4208 /* Find ways of folding logical expressions of LHS and RHS:
4209 Try to merge two comparisons to the same innermost item.
4210 Look for range tests like "ch >= '0' && ch <= '9'".
4211 Look for combinations of simple terms on machines with expensive branches
4212 and evaluate the RHS unconditionally.
4214 For example, if we have p->a == 2 && p->b == 4 and we can make an
4215 object large enough to span both A and B, we can do this with a comparison
4216 against the object ANDed with a mask.
4218 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4219 operations to do this with one comparison.
4221 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4222 function and the one above.
4224 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4225 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4227 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4228 two operands.
4230 We return the simplified tree or 0 if no optimization is possible. */
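/* For example (an illustrative sketch): if p->a and p->b are adjacent
   8-bit fields, "p->a == 2 && p->b == 4" may become a single 16-bit
   load of both fields compared against one merged constant, subject
   to the mode, mask and overlap checks performed below.  */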
4232 static tree
4233 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4235 /* If this is the "or" of two comparisons, we can do something if
4236 the comparisons are NE_EXPR. If this is the "and", we can do something
4237 if the comparisons are EQ_EXPR. I.e.,
4238 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4240 WANTED_CODE is this operation code. For single bit fields, we can
4241 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4242 comparison for one-bit fields. */
4244 enum tree_code wanted_code;
4245 enum tree_code lcode, rcode;
4246 tree ll_arg, lr_arg, rl_arg, rr_arg;
4247 tree ll_inner, lr_inner, rl_inner, rr_inner;
4248 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4249 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4250 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4251 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4252 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4253 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4254 enum machine_mode lnmode, rnmode;
4255 tree ll_mask, lr_mask, rl_mask, rr_mask;
4256 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4257 tree l_const, r_const;
4258 tree lntype, rntype, result;
4259 int first_bit, end_bit;
4260 int volatilep;
4262 /* Start by getting the comparison codes. Fail if anything is volatile.
4263 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4264 it were surrounded with a NE_EXPR. */
4266 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4267 return 0;
4269 lcode = TREE_CODE (lhs);
4270 rcode = TREE_CODE (rhs);
4272 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4274 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4275 lcode = NE_EXPR;
4278 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4280 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4281 rcode = NE_EXPR;
4284 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4285 return 0;
4287 ll_arg = TREE_OPERAND (lhs, 0);
4288 lr_arg = TREE_OPERAND (lhs, 1);
4289 rl_arg = TREE_OPERAND (rhs, 0);
4290 rr_arg = TREE_OPERAND (rhs, 1);
4292 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4293 if (simple_operand_p (ll_arg)
4294 && simple_operand_p (lr_arg))
4296 tree result;
4297 if (operand_equal_p (ll_arg, rl_arg, 0)
4298 && operand_equal_p (lr_arg, rr_arg, 0))
4300 result = combine_comparisons (code, lcode, rcode,
4301 truth_type, ll_arg, lr_arg);
4302 if (result)
4303 return result;
4305 else if (operand_equal_p (ll_arg, rr_arg, 0)
4306 && operand_equal_p (lr_arg, rl_arg, 0))
4308 result = combine_comparisons (code, lcode,
4309 swap_tree_comparison (rcode),
4310 truth_type, ll_arg, lr_arg);
4311 if (result)
4312 return result;
4316 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4317 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4319 /* If the RHS can be evaluated unconditionally and its operands are
4320 simple, it wins to evaluate the RHS unconditionally on machines
4321 with expensive branches. In this case, this isn't a comparison
4322 that can be merged. Avoid doing this if the RHS is a floating-point
4323 comparison since those can trap. */
4325 if (BRANCH_COST >= 2
4326 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4327 && simple_operand_p (rl_arg)
4328 && simple_operand_p (rr_arg))
4330 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4331 if (code == TRUTH_OR_EXPR
4332 && lcode == NE_EXPR && integer_zerop (lr_arg)
4333 && rcode == NE_EXPR && integer_zerop (rr_arg)
4334 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4335 return build2 (NE_EXPR, truth_type,
4336 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4337 ll_arg, rl_arg),
4338 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4340 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4341 if (code == TRUTH_AND_EXPR
4342 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4343 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4344 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4345 return build2 (EQ_EXPR, truth_type,
4346 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4347 ll_arg, rl_arg),
4348 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4350 return build2 (code, truth_type, lhs, rhs);
4353 /* See if the comparisons can be merged. Then get all the parameters for
4354 each side. */
4356 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4357 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4358 return 0;
4360 volatilep = 0;
4361 ll_inner = decode_field_reference (ll_arg,
4362 &ll_bitsize, &ll_bitpos, &ll_mode,
4363 &ll_unsignedp, &volatilep, &ll_mask,
4364 &ll_and_mask);
4365 lr_inner = decode_field_reference (lr_arg,
4366 &lr_bitsize, &lr_bitpos, &lr_mode,
4367 &lr_unsignedp, &volatilep, &lr_mask,
4368 &lr_and_mask);
4369 rl_inner = decode_field_reference (rl_arg,
4370 &rl_bitsize, &rl_bitpos, &rl_mode,
4371 &rl_unsignedp, &volatilep, &rl_mask,
4372 &rl_and_mask);
4373 rr_inner = decode_field_reference (rr_arg,
4374 &rr_bitsize, &rr_bitpos, &rr_mode,
4375 &rr_unsignedp, &volatilep, &rr_mask,
4376 &rr_and_mask);
4378 /* It must be true that the inner operation on the lhs of each
4379 comparison must be the same if we are to be able to do anything.
4380 Then see if we have constants. If not, the same must be true for
4381 the rhs's. */
4382 if (volatilep || ll_inner == 0 || rl_inner == 0
4383 || ! operand_equal_p (ll_inner, rl_inner, 0))
4384 return 0;
4386 if (TREE_CODE (lr_arg) == INTEGER_CST
4387 && TREE_CODE (rr_arg) == INTEGER_CST)
4388 l_const = lr_arg, r_const = rr_arg;
4389 else if (lr_inner == 0 || rr_inner == 0
4390 || ! operand_equal_p (lr_inner, rr_inner, 0))
4391 return 0;
4392 else
4393 l_const = r_const = 0;
4395 /* If either comparison code is not correct for our logical operation,
4396 fail. However, we can convert a one-bit comparison against zero into
4397 the opposite comparison against that bit being set in the field. */
4399 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4400 if (lcode != wanted_code)
4402 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4404 /* Make the left operand unsigned, since we are only interested
4405 in the value of one bit. Otherwise we are doing the wrong
4406 thing below. */
4407 ll_unsignedp = 1;
4408 l_const = ll_mask;
4410 else
4411 return 0;
4414 /* This is analogous to the code for l_const above. */
4415 if (rcode != wanted_code)
4417 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4419 rl_unsignedp = 1;
4420 r_const = rl_mask;
4422 else
4423 return 0;
4426 /* After this point all optimizations will generate bit-field
4427 references, which we might not want. */
4428 if (! lang_hooks.can_use_bit_fields_p ())
4429 return 0;
4431 /* See if we can find a mode that contains both fields being compared on
4432 the left. If we can't, fail. Otherwise, update all constants and masks
4433 to be relative to a field of that size. */
4434 first_bit = MIN (ll_bitpos, rl_bitpos);
4435 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4436 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4437 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4438 volatilep);
4439 if (lnmode == VOIDmode)
4440 return 0;
4442 lnbitsize = GET_MODE_BITSIZE (lnmode);
4443 lnbitpos = first_bit & ~ (lnbitsize - 1);
4444 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4445 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4447 if (BYTES_BIG_ENDIAN)
4449 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4450 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4453 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4454 size_int (xll_bitpos), 0);
4455 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4456 size_int (xrl_bitpos), 0);
4458 if (l_const)
4460 l_const = fold_convert (lntype, l_const);
4461 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4462 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4463 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4464 fold (build1 (BIT_NOT_EXPR,
4465 lntype, ll_mask)),
4466 0)))
4468 warning ("comparison is always %d", wanted_code == NE_EXPR);
4470 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4473 if (r_const)
4475 r_const = fold_convert (lntype, r_const);
4476 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4477 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4478 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4479 fold (build1 (BIT_NOT_EXPR,
4480 lntype, rl_mask)),
4481 0)))
4483 warning ("comparison is always %d", wanted_code == NE_EXPR);
4485 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4489 /* If the right sides are not constant, do the same for it. Also,
4490 disallow this optimization if a size or signedness mismatch occurs
4491 between the left and right sides. */
4492 if (l_const == 0)
4494 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4495 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4496 /* Make sure the two fields on the right
4497 correspond to the left without being swapped. */
4498 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4499 return 0;
4501 first_bit = MIN (lr_bitpos, rr_bitpos);
4502 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4503 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4504 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4505 volatilep);
4506 if (rnmode == VOIDmode)
4507 return 0;
4509 rnbitsize = GET_MODE_BITSIZE (rnmode);
4510 rnbitpos = first_bit & ~ (rnbitsize - 1);
4511 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4512 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4514 if (BYTES_BIG_ENDIAN)
4516 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4517 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4520 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4521 size_int (xlr_bitpos), 0);
4522 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4523 size_int (xrr_bitpos), 0);
4525 /* Make a mask that corresponds to both fields being compared.
4526 Do this for both items being compared. If the operands are the
4527 same size and the bits being compared are in the same position
4528 then we can do this by masking both and comparing the masked
4529 results. */
4530 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4531 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4532 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4534 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4535 ll_unsignedp || rl_unsignedp);
4536 if (! all_ones_mask_p (ll_mask, lnbitsize))
4537 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4539 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4540 lr_unsignedp || rr_unsignedp);
4541 if (! all_ones_mask_p (lr_mask, rnbitsize))
4542 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4544 return build2 (wanted_code, truth_type, lhs, rhs);
4547 /* There is still another way we can do something: If both pairs of
4548 fields being compared are adjacent, we may be able to make a wider
4549 field containing them both.
4551 Note that we still must mask the lhs/rhs expressions. Furthermore,
4552 the mask must be shifted to account for the shift done by
4553 make_bit_field_ref. */
4554 if ((ll_bitsize + ll_bitpos == rl_bitpos
4555 && lr_bitsize + lr_bitpos == rr_bitpos)
4556 || (ll_bitpos == rl_bitpos + rl_bitsize
4557 && lr_bitpos == rr_bitpos + rr_bitsize))
4559 tree type;
4561 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4562 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4563 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4564 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4566 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4567 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4568 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4569 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4571 /* Convert to the smaller type before masking out unwanted bits. */
4572 type = lntype;
4573 if (lntype != rntype)
4575 if (lnbitsize > rnbitsize)
4577 lhs = fold_convert (rntype, lhs);
4578 ll_mask = fold_convert (rntype, ll_mask);
4579 type = rntype;
4581 else if (lnbitsize < rnbitsize)
4583 rhs = fold_convert (lntype, rhs);
4584 lr_mask = fold_convert (lntype, lr_mask);
4585 type = lntype;
4589 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4590 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4592 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4593 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4595 return build2 (wanted_code, truth_type, lhs, rhs);
4598 return 0;
4601 /* Handle the case of comparisons with constants. If there is something in
4602 common between the masks, those bits of the constants must be the same.
4603 If not, the condition is always false. Test for this to avoid generating
4604 incorrect code below. */
4605 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4606 if (! integer_zerop (result)
4607 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4608 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4610 if (wanted_code == NE_EXPR)
4612 warning ("`or' of unmatched not-equal tests is always 1");
4613 return constant_boolean_node (true, truth_type);
4615 else
4617 warning ("`and' of mutually exclusive equal-tests is always 0");
4618 return constant_boolean_node (false, truth_type);
4622 /* Construct the expression we will return. First get the component
4623 reference we will make. Unless the mask is all ones the width of
4624 that field, perform the mask operation. Then compare with the
4625 merged constant. */
4626 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4627 ll_unsignedp || rl_unsignedp);
4629 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4630 if (! all_ones_mask_p (ll_mask, lnbitsize))
4631 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4633 return build2 (wanted_code, truth_type, result,
4634 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4637 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4638 constant. */
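/* For example (a hypothetical int "x"): "MAX (x, 3) == 3" becomes
   "x <= 3", "MAX (x, 3) == 5" becomes "x == 5", and "MAX (x, 3) == 2"
   folds to constant false, as the case analysis below spells out.  */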
4640 static tree
4641 optimize_minmax_comparison (tree t)
4643 tree type = TREE_TYPE (t);
4644 tree arg0 = TREE_OPERAND (t, 0);
4645 enum tree_code op_code;
4646 tree comp_const = TREE_OPERAND (t, 1);
4647 tree minmax_const;
4648 int consts_equal, consts_lt;
4649 tree inner;
4651 STRIP_SIGN_NOPS (arg0);
4653 op_code = TREE_CODE (arg0);
4654 minmax_const = TREE_OPERAND (arg0, 1);
4655 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4656 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4657 inner = TREE_OPERAND (arg0, 0);
4659 /* If something does not permit us to optimize, return the original tree. */
4660 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4661 || TREE_CODE (comp_const) != INTEGER_CST
4662 || TREE_CONSTANT_OVERFLOW (comp_const)
4663 || TREE_CODE (minmax_const) != INTEGER_CST
4664 || TREE_CONSTANT_OVERFLOW (minmax_const))
4665 return t;
4667 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4668 and GT_EXPR, doing the rest with recursive calls using logical
4669 simplifications. */
4670 switch (TREE_CODE (t))
4672 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4673 return
4674 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4676 case GE_EXPR:
4677 return
4678 fold (build2 (TRUTH_ORIF_EXPR, type,
4679 optimize_minmax_comparison
4680 (build2 (EQ_EXPR, type, arg0, comp_const)),
4681 optimize_minmax_comparison
4682 (build2 (GT_EXPR, type, arg0, comp_const))));
4684 case EQ_EXPR:
4685 if (op_code == MAX_EXPR && consts_equal)
4686 /* MAX (X, 0) == 0 -> X <= 0 */
4687 return fold (build2 (LE_EXPR, type, inner, comp_const));
4689 else if (op_code == MAX_EXPR && consts_lt)
4690 /* MAX (X, 0) == 5 -> X == 5 */
4691 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4693 else if (op_code == MAX_EXPR)
4694 /* MAX (X, 0) == -1 -> false */
4695 return omit_one_operand (type, integer_zero_node, inner);
4697 else if (consts_equal)
4698 /* MIN (X, 0) == 0 -> X >= 0 */
4699 return fold (build2 (GE_EXPR, type, inner, comp_const));
4701 else if (consts_lt)
4702 /* MIN (X, 0) == 5 -> false */
4703 return omit_one_operand (type, integer_zero_node, inner);
4705 else
4706 /* MIN (X, 0) == -1 -> X == -1 */
4707 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4709 case GT_EXPR:
4710 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4711 /* MAX (X, 0) > 0 -> X > 0
4712 MAX (X, 0) > 5 -> X > 5 */
4713 return fold (build2 (GT_EXPR, type, inner, comp_const));
4715 else if (op_code == MAX_EXPR)
4716 /* MAX (X, 0) > -1 -> true */
4717 return omit_one_operand (type, integer_one_node, inner);
4719 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4720 /* MIN (X, 0) > 0 -> false
4721 MIN (X, 0) > 5 -> false */
4722 return omit_one_operand (type, integer_zero_node, inner);
4724 else
4725 /* MIN (X, 0) > -1 -> X > -1 */
4726 return fold (build2 (GT_EXPR, type, inner, comp_const));
4728 default:
4729 return t;
4733 /* T is an integer expression that is being multiplied, divided, or taken a
4734 modulus (CODE says which and what kind of divide or modulus) by a
4735 constant C. See if we can eliminate that operation by folding it with
4736 other operations already in T. WIDE_TYPE, if non-null, is a type that
4737 should be used for the computation if wider than our type.
4739 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4740 (X * 2) + (Y * 4). We must, however, be assured that either the original
4741 expression would not overflow or that overflow is undefined for the type
4742 in the language in question.
4744 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4745 the machine has a multiply-accumulate insn or that this is part of an
4746 addressing calculation.
4748 If we return a non-null expression, it is an equivalent form of the
4749 original computation, but need not be in the original type. */
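/* For example (an illustrative sketch, assuming signed arithmetic
   without -fwrapv): dividing "(x * 6) + 12" by 3 with TRUNC_DIV_EXPR
   can be rewritten as "(x * 2) + 4", eliminating the division.  */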
4751 static tree
4752 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4754 /* To avoid exponential search depth, refuse to allow recursion past
4755 three levels. Beyond that (1) it's highly unlikely that we'll find
4756 something interesting and (2) we've probably processed it before
4757 when we built the inner expression. */
4759 static int depth;
4760 tree ret;
4762 if (depth > 3)
4763 return NULL;
4765 depth++;
4766 ret = extract_muldiv_1 (t, c, code, wide_type);
4767 depth--;
4769 return ret;
4772 static tree
4773 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4775 tree type = TREE_TYPE (t);
4776 enum tree_code tcode = TREE_CODE (t);
4777 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4778 > GET_MODE_SIZE (TYPE_MODE (type)))
4779 ? wide_type : type);
4780 tree t1, t2;
4781 int same_p = tcode == code;
4782 tree op0 = NULL_TREE, op1 = NULL_TREE;
4784 /* Don't deal with constants of zero here; they confuse the code below. */
4785 if (integer_zerop (c))
4786 return NULL_TREE;
4788 if (TREE_CODE_CLASS (tcode) == '1')
4789 op0 = TREE_OPERAND (t, 0);
4791 if (TREE_CODE_CLASS (tcode) == '2')
4792 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4794 /* Note that we need not handle conditional operations here since fold
4795 already handles those cases. So just do arithmetic here. */
4796 switch (tcode)
4798 case INTEGER_CST:
4799 /* For a constant, we can always simplify if we are a multiply
4800 or (for divide and modulus) if it is a multiple of our constant. */
4801 if (code == MULT_EXPR
4802 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4803 return const_binop (code, fold_convert (ctype, t),
4804 fold_convert (ctype, c), 0);
4805 break;
4807 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4808 /* If op0 is an expression ... */
4809 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4810 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4811 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4812 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4813 /* ... and is unsigned, and its type is smaller than ctype,
4814 then we cannot pass through as widening. */
4815 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4816 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4817 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4818 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4819 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4820 /* ... or its type is larger than ctype,
4821 then we cannot pass through this truncation. */
4822 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4823 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4824 /* ... or signedness changes for division or modulus,
4825 then we cannot pass through this conversion. */
4826 || (code != MULT_EXPR
4827 && (TYPE_UNSIGNED (ctype)
4828 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4829 break;
4831 /* Pass the constant down and see if we can make a simplification. If
4832 we can, replace this expression with the inner simplification for
4833 possible later conversion to our or some other type. */
4834 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4835 && TREE_CODE (t2) == INTEGER_CST
4836 && ! TREE_CONSTANT_OVERFLOW (t2)
4837 && (0 != (t1 = extract_muldiv (op0, t2, code,
4838 code == MULT_EXPR
4839 ? ctype : NULL_TREE))))
4840 return t1;
4841 break;
4843 case NEGATE_EXPR: case ABS_EXPR:
4844 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4845 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4846 break;
4848 case MIN_EXPR: case MAX_EXPR:
4849 /* If widening the type changes the signedness, then we can't perform
4850 this optimization as that changes the result. */
4851 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4852 break;
4854 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4855 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4856 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4858 if (tree_int_cst_sgn (c) < 0)
4859 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4861 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4862 fold_convert (ctype, t2)));
4864 break;
4866 case LSHIFT_EXPR: case RSHIFT_EXPR:
4867 /* If the second operand is constant, this is a multiplication
4868 or floor division by a power of two, so we can treat it that
4869 way unless the multiplier or divisor overflows. */
4870 if (TREE_CODE (op1) == INTEGER_CST
4871 /* const_binop may not detect overflow correctly,
4872 so check for it explicitly here. */
4873 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4874 && TREE_INT_CST_HIGH (op1) == 0
4875 && 0 != (t1 = fold_convert (ctype,
4876 const_binop (LSHIFT_EXPR,
4877 size_one_node,
4878 op1, 0)))
4879 && ! TREE_OVERFLOW (t1))
4880 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
4881 ? MULT_EXPR : FLOOR_DIV_EXPR,
4882 ctype, fold_convert (ctype, op0), t1),
4883 c, code, wide_type);
4884 break;
4886 case PLUS_EXPR: case MINUS_EXPR:
4887 /* See if we can eliminate the operation on both sides. If we can, we
4888 can return a new PLUS or MINUS. If we can't, the only remaining
4889 cases where we can do anything are if the second operand is a
4890 constant. */
4891 t1 = extract_muldiv (op0, c, code, wide_type);
4892 t2 = extract_muldiv (op1, c, code, wide_type);
4893 if (t1 != 0 && t2 != 0
4894 && (code == MULT_EXPR
4895 /* If not multiplication, we can only do this if both operands
4896 are divisible by c. */
4897 || (multiple_of_p (ctype, op0, c)
4898 && multiple_of_p (ctype, op1, c))))
4899 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4900 fold_convert (ctype, t2)));
4902 /* If this was a subtraction, negate OP1 and set it to be an addition.
4903 This simplifies the logic below. */
4904 if (tcode == MINUS_EXPR)
4905 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4907 if (TREE_CODE (op1) != INTEGER_CST)
4908 break;
4910 /* If either OP1 or C are negative, this optimization is not safe for
4911 some of the division and remainder types while for others we need
4912 to change the code. */
4913 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4915 if (code == CEIL_DIV_EXPR)
4916 code = FLOOR_DIV_EXPR;
4917 else if (code == FLOOR_DIV_EXPR)
4918 code = CEIL_DIV_EXPR;
4919 else if (code != MULT_EXPR
4920 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4921 break;
4924 /* If it's a multiply or a division/modulus operation of a multiple
4925 of our constant, do the operation and verify it doesn't overflow. */
4926 if (code == MULT_EXPR
4927 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4929 op1 = const_binop (code, fold_convert (ctype, op1),
4930 fold_convert (ctype, c), 0);
4931 /* We allow the constant to overflow with wrapping semantics. */
4932 if (op1 == 0
4933 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4934 break;
4936 else
4937 break;
4939 /* If we have an unsigned type that is not a sizetype, we cannot widen
4940 the operation since it will change the result if the original
4941 computation overflowed. */
4942 if (TYPE_UNSIGNED (ctype)
4943 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4944 && ctype != type)
4945 break;
4947 /* If we were able to eliminate our operation from the first side,
4948 apply our operation to the second side and reform the PLUS. */
4949 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4950 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
4952 /* The last case is if we are a multiply. In that case, we can
4953 apply the distributive law to commute the multiply and addition
4954 if the multiplication of the constants doesn't overflow. */
4955 if (code == MULT_EXPR)
4956 return fold (build2 (tcode, ctype,
4957 fold (build2 (code, ctype,
4958 fold_convert (ctype, op0),
4959 fold_convert (ctype, c))),
4960 op1));
4962 break;
4964 case MULT_EXPR:
4965 /* We have a special case here if we are doing something like
4966 (C * 8) % 4 since we know that's zero. */
4967 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4968 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4969 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4970 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4971 return omit_one_operand (type, integer_zero_node, op0);
4973 /* ... fall through ... */
4975 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4976 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4977 /* If we can extract our operation from the LHS, do so and return a
4978 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4979 do something only if the second operand is a constant. */
4980 if (same_p
4981 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4982 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4983 fold_convert (ctype, op1)));
4984 else if (tcode == MULT_EXPR && code == MULT_EXPR
4985 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4986 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4987 fold_convert (ctype, t1)));
4988 else if (TREE_CODE (op1) != INTEGER_CST)
4989 return 0;
4991 /* If these are the same operation types, we can associate them
4992 assuming no overflow. */
4993 if (tcode == code
4994 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4995 fold_convert (ctype, c), 0))
4996 && ! TREE_OVERFLOW (t1))
4997 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
4999 /* If these operations "cancel" each other, we have the main
5000 optimizations of this pass, which occur when either constant is a
5001 multiple of the other, in which case we replace this with an
5002 operation of either CODE or TCODE.
5004 If we have an unsigned type that is not a sizetype, we cannot do
5005 this since it will change the result if the original computation
5006 overflowed. */
5007 if ((! TYPE_UNSIGNED (ctype)
5008 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5009 && ! flag_wrapv
5010 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5011 || (tcode == MULT_EXPR
5012 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5013 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5015 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5016 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5017 fold_convert (ctype,
5018 const_binop (TRUNC_DIV_EXPR,
5019 op1, c, 0))));
5020 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5021 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5022 fold_convert (ctype,
5023 const_binop (TRUNC_DIV_EXPR,
5024 c, op1, 0))));
5026 break;
5028 default:
5029 break;
5032 return 0;
5035 /* Return a node which has the indicated constant VALUE (either 0 or
5036 1), and is of the indicated TYPE. */
5038 static tree
5039 constant_boolean_node (int value, tree type)
5041 if (type == integer_type_node)
5042 return value ? integer_one_node : integer_zero_node;
5043 else if (type == boolean_type_node)
5044 return value ? boolean_true_node : boolean_false_node;
5045 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5046 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5047 : integer_zero_node);
5048 else
5050 tree t = build_int_2 (value, 0);
5052 TREE_TYPE (t) = type;
5053 return t;
5057 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5058 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5059 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5060 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5061 COND is the first argument to CODE; otherwise (as in the example
5062 given here), it is the second argument. TYPE is the type of the
5063 original expression. Return NULL_TREE if no simplification is
5064 possible. */
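/* For example (illustration only): with CODE = PLUS_EXPR, ARG = 5 and
   COND = "b ? x : y", the result is "b ? (5 + x) : (5 + y)"; because
   ARG is required to be TREE_CONSTANT, duplicating it into both arms
   never needs a SAVE_EXPR.  */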
5066 static tree
5067 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5068 tree cond, tree arg, int cond_first_p)
5070 tree test, true_value, false_value;
5071 tree lhs = NULL_TREE;
5072 tree rhs = NULL_TREE;
5074 /* This transformation is only worthwhile if we don't have to wrap
5075 arg in a SAVE_EXPR, and the operation can be simplified on at least
5076 one of the branches once it is pushed inside the COND_EXPR. */
5077 if (!TREE_CONSTANT (arg))
5078 return NULL_TREE;
5080 if (TREE_CODE (cond) == COND_EXPR)
5082 test = TREE_OPERAND (cond, 0);
5083 true_value = TREE_OPERAND (cond, 1);
5084 false_value = TREE_OPERAND (cond, 2);
5085 /* If this operand throws an exception, then it does not make
5086 sense to try to perform a logical or arithmetic operation
5087 involving it. */
5088 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5089 lhs = true_value;
5090 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5091 rhs = false_value;
5093 else
5095 tree testtype = TREE_TYPE (cond);
5096 test = cond;
5097 true_value = constant_boolean_node (true, testtype);
5098 false_value = constant_boolean_node (false, testtype);
5101 if (lhs == 0)
5102 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5103 : build2 (code, type, arg, true_value));
5104 if (rhs == 0)
5105 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5106 : build2 (code, type, arg, false_value));
5108 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5109 return fold_convert (type, test);
5113 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5115 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5116 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5117 ADDEND is the same as X.
5119 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5120 and finite. The problematic cases are when X is zero, and its mode
5121 has signed zeros. In the case of rounding towards -infinity,
5122 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5123 modes, X + 0 is not the same as X because -0 + 0 is 0. */
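/* For example (a hypothetical double X): "X - 0.0" folds to X when
   -fsignaling-nans is off and sign-dependent rounding need not be
   honored, whereas "X + 0.0" must not fold when signed zeros matter,
   because (-0.0) + 0.0 is +0.0.  */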
5125 static bool
5126 fold_real_zero_addition_p (tree type, tree addend, int negate)
5128 if (!real_zerop (addend))
5129 return false;
5131 /* Don't allow the fold with -fsignaling-nans. */
5132 if (HONOR_SNANS (TYPE_MODE (type)))
5133 return false;
5135 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5136 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5137 return true;
5139 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5140 if (TREE_CODE (addend) == REAL_CST
5141 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5142 negate = !negate;
5144 /* The mode has signed zeros, and we have to honor their sign.
5145 In this situation, there is only one case we can return true for.
5146 X - 0 is the same as X unless rounding towards -infinity is
5147 supported. */
5148 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5151 /* Subroutine of fold() that checks comparisons of built-in math
5152 functions against real constants.
5154 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5155 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5156 is the type of the result and ARG0 and ARG1 are the operands of the
5157 comparison. ARG1 must be a TREE_REAL_CST.
5159 The function returns the constant folded tree if a simplification
5160 can be made, and NULL_TREE otherwise. */
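/* For example (illustration only): "sqrt (x) > 2.0" can be folded to
   "x > 4.0", and "sqrt (x) < -1.0" folds to constant false, under the
   NaN and infinity conditions spelled out below.  */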
5162 static tree
5163 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5164 tree type, tree arg0, tree arg1)
5166 REAL_VALUE_TYPE c;
5168 if (BUILTIN_SQRT_P (fcode))
5170 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5171 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5173 c = TREE_REAL_CST (arg1);
5174 if (REAL_VALUE_NEGATIVE (c))
5176 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
5177 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5178 return omit_one_operand (type, integer_zero_node, arg);
5180 /* sqrt(x) > y is always true, if y is negative and we
5181 don't care about NaNs, i.e. negative values of x. */
5182 if (code == NE_EXPR || !HONOR_NANS (mode))
5183 return omit_one_operand (type, integer_one_node, arg);
5185 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5186 return fold (build2 (GE_EXPR, type, arg,
5187 build_real (TREE_TYPE (arg), dconst0)));
5189 else if (code == GT_EXPR || code == GE_EXPR)
5191 REAL_VALUE_TYPE c2;
5193 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5194 real_convert (&c2, mode, &c2);
5196 if (REAL_VALUE_ISINF (c2))
5198 /* sqrt(x) > y is x == +Inf, when y is very large. */
5199 if (HONOR_INFINITIES (mode))
5200 return fold (build2 (EQ_EXPR, type, arg,
5201 build_real (TREE_TYPE (arg), c2)));
5203 /* sqrt(x) > y is always false, when y is very large
5204 and we don't care about infinities. */
5205 return omit_one_operand (type, integer_zero_node, arg);
5208 /* sqrt(x) > c is the same as x > c*c. */
5209 return fold (build2 (code, type, arg,
5210 build_real (TREE_TYPE (arg), c2)));
5212 else if (code == LT_EXPR || code == LE_EXPR)
5214 REAL_VALUE_TYPE c2;
5216 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5217 real_convert (&c2, mode, &c2);
5219 if (REAL_VALUE_ISINF (c2))
5221 /* sqrt(x) < y is always true, when y is a very large
5222 value and we don't care about NaNs or Infinities. */
5223 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5224 return omit_one_operand (type, integer_one_node, arg);
5226 /* sqrt(x) < y is x != +Inf when y is very large and we
5227 don't care about NaNs. */
5228 if (! HONOR_NANS (mode))
5229 return fold (build2 (NE_EXPR, type, arg,
5230 build_real (TREE_TYPE (arg), c2)));
5232 /* sqrt(x) < y is x >= 0 when y is very large and we
5233 don't care about Infinities. */
5234 if (! HONOR_INFINITIES (mode))
5235 return fold (build2 (GE_EXPR, type, arg,
5236 build_real (TREE_TYPE (arg), dconst0)));
5238 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5239 if (lang_hooks.decls.global_bindings_p () != 0
5240 || CONTAINS_PLACEHOLDER_P (arg))
5241 return NULL_TREE;
5243 arg = save_expr (arg);
5244 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5245 fold (build2 (GE_EXPR, type, arg,
5246 build_real (TREE_TYPE (arg),
5247 dconst0))),
5248 fold (build2 (NE_EXPR, type, arg,
5249 build_real (TREE_TYPE (arg),
5250 c2)))));
5253 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5254 if (! HONOR_NANS (mode))
5255 return fold (build2 (code, type, arg,
5256 build_real (TREE_TYPE (arg), c2)));
5258 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5259 if (lang_hooks.decls.global_bindings_p () == 0
5260 && ! CONTAINS_PLACEHOLDER_P (arg))
5262 arg = save_expr (arg);
5263 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5264 fold (build2 (GE_EXPR, type, arg,
5265 build_real (TREE_TYPE (arg),
5266 dconst0))),
5267 fold (build2 (code, type, arg,
5268 build_real (TREE_TYPE (arg),
5269 c2)))));
5274 return NULL_TREE;
5277 /* Subroutine of fold() that optimizes comparisons against Infinities,
5278 either +Inf or -Inf.
5280 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5281 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5282 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5284 The function returns the constant folded tree if a simplification
5285 can be made, and NULL_TREE otherwise. */
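/* For example (a hypothetical double X): "X >= +Inf" becomes
   "X > DBL_MAX" and "X < +Inf" becomes "X <= DBL_MAX", since no
   finite value lies between DBL_MAX and +Inf.  */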
5287 static tree
5288 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5290 enum machine_mode mode;
5291 REAL_VALUE_TYPE max;
5292 tree temp;
5293 bool neg;
5295 mode = TYPE_MODE (TREE_TYPE (arg0));
5297 /* For negative infinity swap the sense of the comparison. */
5298 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5299 if (neg)
5300 code = swap_tree_comparison (code);
5302 switch (code)
5304 case GT_EXPR:
5305 /* x > +Inf is always false, if we ignore sNaNs. */
5306 if (HONOR_SNANS (mode))
5307 return NULL_TREE;
5308 return omit_one_operand (type, integer_zero_node, arg0);
5310 case LE_EXPR:
5311 /* x <= +Inf is always true, if we don't care about NaNs. */
5312 if (! HONOR_NANS (mode))
5313 return omit_one_operand (type, integer_one_node, arg0);
5315 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5316 if (lang_hooks.decls.global_bindings_p () == 0
5317 && ! CONTAINS_PLACEHOLDER_P (arg0))
5319 arg0 = save_expr (arg0);
5320 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5322 break;
5324 case EQ_EXPR:
5325 case GE_EXPR:
5326 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5327 real_maxval (&max, neg, mode);
5328 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5329 arg0, build_real (TREE_TYPE (arg0), max)));
5331 case LT_EXPR:
5332 /* x < +Inf is always equal to x <= DBL_MAX. */
5333 real_maxval (&max, neg, mode);
5334 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5335 arg0, build_real (TREE_TYPE (arg0), max)));
5337 case NE_EXPR:
5338 /* x != +Inf is always equal to !(x > DBL_MAX). */
5339 real_maxval (&max, neg, mode);
5340 if (! HONOR_NANS (mode))
5341 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5342 arg0, build_real (TREE_TYPE (arg0), max)));
5344 /* The transformation below creates non-gimple code and thus is
5345 not appropriate if we are in gimple form. */
5346 if (in_gimple_form)
5347 return NULL_TREE;
5349 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5350 arg0, build_real (TREE_TYPE (arg0), max)));
5351 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5353 default:
5354 break;
5357 return NULL_TREE;
5360 /* Subroutine of fold() that optimizes comparisons of a division by
5361 a nonzero integer constant against an integer constant, i.e.
5362 X/C1 op C2.
5364 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5365 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5366 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5368 The function returns the constant folded tree if a simplification
5369 can be made, and NULL_TREE otherwise. */
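/* For example (a hypothetical unsigned int X): "X / 3 == 2" holds
   exactly when X is in [6, 8], so the comparison can be folded into
   the range check that build_range_check emits for +[6, 8].  */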
5371 static tree
5372 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5374 tree prod, tmp, hi, lo;
5375 tree arg00 = TREE_OPERAND (arg0, 0);
5376 tree arg01 = TREE_OPERAND (arg0, 1);
5377 unsigned HOST_WIDE_INT lpart;
5378 HOST_WIDE_INT hpart;
5379 int overflow;
5381 /* We have to do this the hard way to detect unsigned overflow.
5382 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5383 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5384 TREE_INT_CST_HIGH (arg01),
5385 TREE_INT_CST_LOW (arg1),
5386 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5387 prod = build_int_2 (lpart, hpart);
5388 TREE_TYPE (prod) = TREE_TYPE (arg00);
5389 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5390 || TREE_INT_CST_HIGH (prod) != hpart
5391 || TREE_INT_CST_LOW (prod) != lpart;
5392 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5394 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5396 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5397 lo = prod;
5399 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5400 overflow = add_double (TREE_INT_CST_LOW (prod),
5401 TREE_INT_CST_HIGH (prod),
5402 TREE_INT_CST_LOW (tmp),
5403 TREE_INT_CST_HIGH (tmp),
5404 &lpart, &hpart);
5405 hi = build_int_2 (lpart, hpart);
5406 TREE_TYPE (hi) = TREE_TYPE (arg00);
5407 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5408 || TREE_INT_CST_HIGH (hi) != hpart
5409 || TREE_INT_CST_LOW (hi) != lpart
5410 || TREE_OVERFLOW (prod);
5411 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5413 else if (tree_int_cst_sgn (arg01) >= 0)
5415 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5416 switch (tree_int_cst_sgn (arg1))
5418 case -1:
5419 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5420 hi = prod;
5421 break;
5423 case 0:
5424 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5425 hi = tmp;
5426 break;
5428 case 1:
5429 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5430 lo = prod;
5431 break;
5433 default:
5434 abort ();
5437 else
5439 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5440 switch (tree_int_cst_sgn (arg1))
5442 case -1:
5443 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5444 lo = prod;
5445 break;
5447 case 0:
5448 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5449 lo = tmp;
5450 break;
5452 case 1:
5453 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5454 hi = prod;
5455 break;
5457 default:
5458 abort ();
5462 switch (code)
5464 case EQ_EXPR:
5465 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5466 return omit_one_operand (type, integer_zero_node, arg00);
5467 if (TREE_OVERFLOW (hi))
5468 return fold (build2 (GE_EXPR, type, arg00, lo));
5469 if (TREE_OVERFLOW (lo))
5470 return fold (build2 (LE_EXPR, type, arg00, hi));
5471 return build_range_check (type, arg00, 1, lo, hi);
5473 case NE_EXPR:
5474 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5475 return omit_one_operand (type, integer_one_node, arg00);
5476 if (TREE_OVERFLOW (hi))
5477 return fold (build2 (LT_EXPR, type, arg00, lo));
5478 if (TREE_OVERFLOW (lo))
5479 return fold (build2 (GT_EXPR, type, arg00, hi));
5480 return build_range_check (type, arg00, 0, lo, hi);
5482 case LT_EXPR:
5483 if (TREE_OVERFLOW (lo))
5484 return omit_one_operand (type, integer_zero_node, arg00);
5485 return fold (build2 (LT_EXPR, type, arg00, lo));
5487 case LE_EXPR:
5488 if (TREE_OVERFLOW (hi))
5489 return omit_one_operand (type, integer_one_node, arg00);
5490 return fold (build2 (LE_EXPR, type, arg00, hi));
5492 case GT_EXPR:
5493 if (TREE_OVERFLOW (hi))
5494 return omit_one_operand (type, integer_zero_node, arg00);
5495 return fold (build2 (GT_EXPR, type, arg00, hi));
5497 case GE_EXPR:
5498 if (TREE_OVERFLOW (lo))
5499 return omit_one_operand (type, integer_one_node, arg00);
5500 return fold (build2 (GE_EXPR, type, arg00, lo));
5502 default:
5503 break;
5506 return NULL_TREE;
5510 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5511 equality/inequality test, then return a simplified form of
5512 the test using shifts and logical operations. Otherwise return
5513 NULL. RESULT_TYPE is the desired result type. */
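/* As an illustration: for unsigned x, (x & 8) != 0 is folded below
   into ((x >> 3) & 1), and (x & 8) == 0 into (((x >> 3) ^ 1) & 1).  */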
5515 tree
5516 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5517 tree result_type)
5519 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5520 operand 0. */
5521 if (code == TRUTH_NOT_EXPR)
5523 code = TREE_CODE (arg0);
5524 if (code != NE_EXPR && code != EQ_EXPR)
5525 return NULL_TREE;
5527 /* Extract the arguments of the EQ/NE. */
5528 arg1 = TREE_OPERAND (arg0, 1);
5529 arg0 = TREE_OPERAND (arg0, 0);
5531 /* This requires us to invert the code. */
5532 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5535 /* If this is testing a single bit, we can optimize the test. */
5536 if ((code == NE_EXPR || code == EQ_EXPR)
5537 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5538 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5540 tree inner = TREE_OPERAND (arg0, 0);
5541 tree type = TREE_TYPE (arg0);
5542 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5543 enum machine_mode operand_mode = TYPE_MODE (type);
5544 int ops_unsigned;
5545 tree signed_type, unsigned_type, intermediate_type;
5546 tree arg00;
5548 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5549 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
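/* For example, with a signed 32-bit A, (A & 0x80000000) != 0 tests
   exactly the sign bit and becomes A < 0, while (A & 0x80000000) == 0
   becomes A >= 0.  */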
5550 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5551 if (arg00 != NULL_TREE
5552 /* This is only a win if casting to a signed type is cheap,
5553 i.e. when arg00's type is not a partial mode. */
5554 && TYPE_PRECISION (TREE_TYPE (arg00))
5555 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5557 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5558 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5559 result_type, fold_convert (stype, arg00),
5560 fold_convert (stype, integer_zero_node)));
5563 /* Otherwise we have (A & C) != 0 where C is a single bit,
5564 convert that into ((A >> C2) & 1), where C2 = log2(C).
5565 Similarly for (A & C) == 0. */
5567 /* If INNER is a right shift of a constant and it plus BITNUM does
5568 not overflow, adjust BITNUM and INNER. */
5569 if (TREE_CODE (inner) == RSHIFT_EXPR
5570 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5571 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5572 && bitnum < TYPE_PRECISION (type)
5573 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5574 bitnum - TYPE_PRECISION (type)))
5576 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5577 inner = TREE_OPERAND (inner, 0);
5580 /* If we are going to be able to omit the AND below, we must do our
5581 operations as unsigned. If we must use the AND, we have a choice.
5582 Normally unsigned is faster, but for some machines signed is. */
5583 #ifdef LOAD_EXTEND_OP
5584 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5585 #else
5586 ops_unsigned = 1;
5587 #endif
5589 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5590 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5591 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5592 inner = fold_convert (intermediate_type, inner);
5594 if (bitnum != 0)
5595 inner = build2 (RSHIFT_EXPR, intermediate_type,
5596 inner, size_int (bitnum));
5598 if (code == EQ_EXPR)
5599 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5600 inner, integer_one_node);
5602 /* Put the AND last so it can combine with more things. */
5603 inner = build2 (BIT_AND_EXPR, intermediate_type,
5604 inner, integer_one_node);
5606 /* Make sure to return the proper type. */
5607 inner = fold_convert (result_type, inner);
5609 return inner;
5611 return NULL_TREE;
5614 /* Check whether we are allowed to reorder operands arg0 and arg1,
5615 such that the evaluation of arg1 occurs before arg0. */
5617 static bool
5618 reorder_operands_p (tree arg0, tree arg1)
5620 if (! flag_evaluation_order)
5621 return true;
5622 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5623 return true;
5624 return ! TREE_SIDE_EFFECTS (arg0)
5625 && ! TREE_SIDE_EFFECTS (arg1);
5628 /* Test whether it is preferable to swap two operands, ARG0 and
5629 ARG1, for example because ARG0 is an integer constant and ARG1
5630 isn't. If REORDER is true, only recommend swapping if we can
5631 evaluate the operands in reverse order. */
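/* For example, given 5 + x this predicate recommends a swap, so fold
   canonicalizes the expression to x + 5 and the tests below only need
   to look for a constant in the second operand.  */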
5633 bool
5634 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5636 STRIP_SIGN_NOPS (arg0);
5637 STRIP_SIGN_NOPS (arg1);
5639 if (TREE_CODE (arg1) == INTEGER_CST)
5640 return 0;
5641 if (TREE_CODE (arg0) == INTEGER_CST)
5642 return 1;
5644 if (TREE_CODE (arg1) == REAL_CST)
5645 return 0;
5646 if (TREE_CODE (arg0) == REAL_CST)
5647 return 1;
5649 if (TREE_CODE (arg1) == COMPLEX_CST)
5650 return 0;
5651 if (TREE_CODE (arg0) == COMPLEX_CST)
5652 return 1;
5654 if (TREE_CONSTANT (arg1))
5655 return 0;
5656 if (TREE_CONSTANT (arg0))
5657 return 1;
5659 if (optimize_size)
5660 return 0;
5662 if (reorder && flag_evaluation_order
5663 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5664 return 0;
5666 if (DECL_P (arg1))
5667 return 0;
5668 if (DECL_P (arg0))
5669 return 1;
5680 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
5681 for commutative and comparison operators. Ensuring a canonical
5682 form allows the optimizers to find additional redundancies without
5683 having to explicitly check for both orderings. */
5684 if (TREE_CODE (arg0) == SSA_NAME
5685 && TREE_CODE (arg1) == SSA_NAME
5686 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5687 return 1;
5689 return 0;
5692 /* Perform constant folding and related simplification of EXPR.
5693 The related simplifications include x*1 => x, x*0 => 0, etc.,
5694 and application of the associative law.
5695 NOP_EXPR conversions may be removed freely (as long as we
5696 are careful not to change the type of the overall expression).
5697 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5698 but we can constant-fold them if they have constant operands. */
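/* Illustrative use: fold applied to a tree for x + 0 hands back x
   itself (as a non-lvalue), and applied to a tree for 1 + 1 it yields
   the integer constant 2.  Callers must use the returned tree rather
   than assume the argument was modified in place.  */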
5700 #ifdef ENABLE_FOLD_CHECKING
5701 # define fold(x) fold_1 (x)
5702 static tree fold_1 (tree);
5703 static
5704 #endif
5705 tree
5706 fold (tree expr)
5708 const tree t = expr;
5709 const tree type = TREE_TYPE (expr);
5710 tree t1 = NULL_TREE;
5711 tree tem;
5712 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5713 enum tree_code code = TREE_CODE (t);
5714 int kind = TREE_CODE_CLASS (code);
5716 /* WINS will be nonzero when the switch is done
5717 if all operands are constant. */
5718 int wins = 1;
5720 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5721 Likewise for a SAVE_EXPR that's already been evaluated. */
5722 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5723 return t;
5725 /* Return right away if a constant. */
5726 if (kind == 'c')
5727 return t;
5729 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5731 tree subop;
5733 /* Special case for conversion ops that can have fixed point args. */
5734 arg0 = TREE_OPERAND (t, 0);
5736 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5737 if (arg0 != 0)
5738 STRIP_SIGN_NOPS (arg0);
5740 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5741 subop = TREE_REALPART (arg0);
5742 else
5743 subop = arg0;
5745 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5746 && TREE_CODE (subop) != REAL_CST)
5747 /* Note that TREE_CONSTANT isn't enough:
5748 static var addresses are constant but we can't
5749 do arithmetic on them. */
5750 wins = 0;
5752 else if (IS_EXPR_CODE_CLASS (kind))
5754 int len = first_rtl_op (code);
5755 int i;
5756 for (i = 0; i < len; i++)
5758 tree op = TREE_OPERAND (t, i);
5759 tree subop;
5761 if (op == 0)
5762 continue; /* Valid for CALL_EXPR, at least. */
5764 /* Strip any conversions that don't change the mode. This is
5765 safe for every expression, except for a comparison expression
5766 because its signedness is derived from its operands. So, in
5767 the latter case, only strip conversions that don't change the
5768 signedness.
5770 Note that this is done as an internal manipulation within the
5771 constant folder, in order to find the simplest representation
5772 of the arguments so that their form can be studied. In any
5773 case, the appropriate type conversions should be put back in
5774 the tree that will get out of the constant folder. */
5775 if (kind == '<')
5776 STRIP_SIGN_NOPS (op);
5777 else
5778 STRIP_NOPS (op);
5780 if (TREE_CODE (op) == COMPLEX_CST)
5781 subop = TREE_REALPART (op);
5782 else
5783 subop = op;
5785 if (TREE_CODE (subop) != INTEGER_CST
5786 && TREE_CODE (subop) != REAL_CST)
5787 /* Note that TREE_CONSTANT isn't enough:
5788 static var addresses are constant but we can't
5789 do arithmetic on them. */
5790 wins = 0;
5792 if (i == 0)
5793 arg0 = op;
5794 else if (i == 1)
5795 arg1 = op;
5799 /* If this is a commutative operation, and ARG0 is a constant, move it
5800 to ARG1 to reduce the number of tests below. */
5801 if (commutative_tree_code (code)
5802 && tree_swap_operands_p (arg0, arg1, true))
5803 return fold (build2 (code, type, TREE_OPERAND (t, 1),
5804 TREE_OPERAND (t, 0)));
5806 /* Now WINS is set as described above,
5807 ARG0 is the first operand of EXPR,
5808 and ARG1 is the second operand (if it has more than one operand).
5810 First check for cases where an arithmetic operation is applied to a
5811 compound, conditional, or comparison operation. Push the arithmetic
5812 operation inside the compound or conditional to see if any folding
5813 can then be done. Convert comparison to conditional for this purpose.
5814 This also optimizes non-constant cases that used to be done in
5815 expand_expr.
5817 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
5818 one of the operands is a truth value and the other is a truth value or a
5819 BIT_AND_EXPR with the constant 1. In that case, the
5820 code below would make the expression more complex. Change it to a
5821 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5822 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5824 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5825 || code == EQ_EXPR || code == NE_EXPR)
5826 && ((truth_value_p (TREE_CODE (arg0))
5827 && (truth_value_p (TREE_CODE (arg1))
5828 || (TREE_CODE (arg1) == BIT_AND_EXPR
5829 && integer_onep (TREE_OPERAND (arg1, 1)))))
5830 || (truth_value_p (TREE_CODE (arg1))
5831 && (truth_value_p (TREE_CODE (arg0))
5832 || (TREE_CODE (arg0) == BIT_AND_EXPR
5833 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5835 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5836 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5837 : TRUTH_XOR_EXPR,
5838 type, fold_convert (boolean_type_node, arg0),
5839 fold_convert (boolean_type_node, arg1)));
5841 if (code == EQ_EXPR)
5842 tem = invert_truthvalue (tem);
5844 return tem;
5847 if (TREE_CODE_CLASS (code) == '1')
5849 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5850 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5851 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5852 else if (TREE_CODE (arg0) == COND_EXPR)
5854 tree arg01 = TREE_OPERAND (arg0, 1);
5855 tree arg02 = TREE_OPERAND (arg0, 2);
5856 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5857 arg01 = fold (build1 (code, type, arg01));
5858 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5859 arg02 = fold (build1 (code, type, arg02));
5860 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5861 arg01, arg02));
5863 /* If this was a conversion, and all we did was to move it
5864 inside the COND_EXPR, bring it back out. But leave it if
5865 it is a conversion from integer to integer and the
5866 result precision is no wider than a word since such a
5867 conversion is cheap and may be optimized away by combine,
5868 while it couldn't if it were outside the COND_EXPR. Then return
5869 so we don't get into an infinite recursion loop taking the
5870 conversion out and then back in. */
5872 if ((code == NOP_EXPR || code == CONVERT_EXPR
5873 || code == NON_LVALUE_EXPR)
5874 && TREE_CODE (tem) == COND_EXPR
5875 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5876 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5877 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5878 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5879 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5880 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5881 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5882 && (INTEGRAL_TYPE_P
5883 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5884 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5885 tem = build1 (code, type,
5886 build3 (COND_EXPR,
5887 TREE_TYPE (TREE_OPERAND
5888 (TREE_OPERAND (tem, 1), 0)),
5889 TREE_OPERAND (tem, 0),
5890 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5891 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5892 return tem;
5894 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5896 if (TREE_CODE (type) == BOOLEAN_TYPE)
5898 arg0 = copy_node (arg0);
5899 TREE_TYPE (arg0) = type;
5900 return arg0;
5902 else if (TREE_CODE (type) != INTEGER_TYPE)
5903 return fold (build3 (COND_EXPR, type, arg0,
5904 fold (build1 (code, type,
5905 integer_one_node)),
5906 fold (build1 (code, type,
5907 integer_zero_node))));
5910 else if (TREE_CODE_CLASS (code) == '<'
5911 && TREE_CODE (arg0) == COMPOUND_EXPR)
5912 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5913 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
5914 else if (TREE_CODE_CLASS (code) == '<'
5915 && TREE_CODE (arg1) == COMPOUND_EXPR)
5916 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5917 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
5918 else if (TREE_CODE_CLASS (code) == '2'
5919 || TREE_CODE_CLASS (code) == '<')
5921 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5922 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5923 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
5924 arg1)));
5925 if (TREE_CODE (arg1) == COMPOUND_EXPR
5926 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5927 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5928 fold (build2 (code, type,
5929 arg0, TREE_OPERAND (arg1, 1))));
5931 if (TREE_CODE (arg0) == COND_EXPR
5932 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5934 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5935 /*cond_first_p=*/1);
5936 if (tem != NULL_TREE)
5937 return tem;
5940 if (TREE_CODE (arg1) == COND_EXPR
5941 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5943 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5944 /*cond_first_p=*/0);
5945 if (tem != NULL_TREE)
5946 return tem;
5950 switch (code)
5952 case CONST_DECL:
5953 return fold (DECL_INITIAL (t));
5955 case NOP_EXPR:
5956 case FLOAT_EXPR:
5957 case CONVERT_EXPR:
5958 case FIX_TRUNC_EXPR:
5959 case FIX_CEIL_EXPR:
5960 case FIX_FLOOR_EXPR:
5961 case FIX_ROUND_EXPR:
5962 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5963 return TREE_OPERAND (t, 0);
5965 /* Handle cases of two conversions in a row. */
5966 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5967 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5969 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5970 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5971 int inside_int = INTEGRAL_TYPE_P (inside_type);
5972 int inside_ptr = POINTER_TYPE_P (inside_type);
5973 int inside_float = FLOAT_TYPE_P (inside_type);
5974 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5975 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5976 int inter_int = INTEGRAL_TYPE_P (inter_type);
5977 int inter_ptr = POINTER_TYPE_P (inter_type);
5978 int inter_float = FLOAT_TYPE_P (inter_type);
5979 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5980 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5981 int final_int = INTEGRAL_TYPE_P (type);
5982 int final_ptr = POINTER_TYPE_P (type);
5983 int final_float = FLOAT_TYPE_P (type);
5984 unsigned int final_prec = TYPE_PRECISION (type);
5985 int final_unsignedp = TYPE_UNSIGNED (type);
5987 /* In addition to the cases of two conversions in a row
5988 handled below, if we are converting something to its own
5989 type via an object of identical or wider precision, neither
5990 conversion is needed. */
5991 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5992 && ((inter_int && final_int) || (inter_float && final_float))
5993 && inter_prec >= final_prec)
5994 return fold (build1 (code, type,
5995 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5997 /* Likewise, if the intermediate and final types are either both
5998 float or both integer, we don't need the middle conversion if
5999 it is at least as wide as the initial type and doesn't change the signedness
6000 (for integers). Avoid this if the final type is a pointer
6001 since then we sometimes need the inner conversion. Likewise if
6002 the outer has a precision not equal to the size of its mode. */
6003 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6004 || (inter_float && inside_float))
6005 && inter_prec >= inside_prec
6006 && (inter_float || inter_unsignedp == inside_unsignedp)
6007 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6008 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6009 && ! final_ptr)
6010 return fold (build1 (code, type,
6011 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6013 /* If we have a sign-extension of a zero-extended value, we can
6014 replace that by a single zero-extension. */
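/* For example, with c of type unsigned char, (int) (short) c first
   zero-extends c to 16 bits and then sign-extends to 32; the high bit
   of the intermediate value is always clear, so the single
   zero-extension (int) c is equivalent.  */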
6015 if (inside_int && inter_int && final_int
6016 && inside_prec < inter_prec && inter_prec < final_prec
6017 && inside_unsignedp && !inter_unsignedp)
6018 return fold (build1 (code, type,
6019 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6021 /* Two conversions in a row are not needed unless:
6022 - some conversion is floating-point (overstrict for now), or
6023 - the intermediate type is narrower than both initial and
6024 final, or
6025 - the intermediate type and innermost type differ in signedness,
6026 and the outermost type is wider than the intermediate, or
6027 - the initial type is a pointer type and the precisions of the
6028 intermediate and final types differ, or
6029 - the final type is a pointer type and the precisions of the
6030 initial and intermediate types differ. */
6031 if (! inside_float && ! inter_float && ! final_float
6032 && (inter_prec > inside_prec || inter_prec > final_prec)
6033 && ! (inside_int && inter_int
6034 && inter_unsignedp != inside_unsignedp
6035 && inter_prec < final_prec)
6036 && ((inter_unsignedp && inter_prec > inside_prec)
6037 == (final_unsignedp && final_prec > inter_prec))
6038 && ! (inside_ptr && inter_prec != final_prec)
6039 && ! (final_ptr && inside_prec != inter_prec)
6040 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6041 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6042 && ! final_ptr)
6043 return fold (build1 (code, type,
6044 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6047 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6048 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6049 /* Detect assigning a bitfield. */
6050 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6051 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6053 /* Don't leave an assignment inside a conversion
6054 unless assigning a bitfield. */
6055 tree prev = TREE_OPERAND (t, 0);
6056 tem = copy_node (t);
6057 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6058 /* First do the assignment, then return converted constant. */
6059 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6060 TREE_NO_WARNING (tem) = 1;
6061 TREE_USED (tem) = 1;
6062 return tem;
6065 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6066 constant (if x has signed type, the sign bit cannot be set
6067 in c). This folds extension into the BIT_AND_EXPR. */
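/* For instance, with x of type unsigned char, (unsigned int) (x & 0x40)
   folds to ((unsigned int) x) & 0x40, so the widening can combine with
   the mask.  */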
6068 if (INTEGRAL_TYPE_P (type)
6069 && TREE_CODE (type) != BOOLEAN_TYPE
6070 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6071 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6073 tree and = TREE_OPERAND (t, 0);
6074 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6075 int change = 0;
6077 if (TYPE_UNSIGNED (TREE_TYPE (and))
6078 || (TYPE_PRECISION (type)
6079 <= TYPE_PRECISION (TREE_TYPE (and))))
6080 change = 1;
6081 else if (TYPE_PRECISION (TREE_TYPE (and1))
6082 <= HOST_BITS_PER_WIDE_INT
6083 && host_integerp (and1, 1))
6085 unsigned HOST_WIDE_INT cst;
6087 cst = tree_low_cst (and1, 1);
6088 cst &= (HOST_WIDE_INT) -1
6089 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6090 change = (cst == 0);
6091 #ifdef LOAD_EXTEND_OP
6092 if (change
6093 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6094 == ZERO_EXTEND))
6096 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6097 and0 = fold_convert (uns, and0);
6098 and1 = fold_convert (uns, and1);
6100 #endif
6102 if (change)
6103 return fold (build2 (BIT_AND_EXPR, type,
6104 fold_convert (type, and0),
6105 fold_convert (type, and1)));
6108 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6109 T2 being pointers to types of the same size. */
6110 if (POINTER_TYPE_P (TREE_TYPE (t))
6111 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6112 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6113 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6115 tree arg00 = TREE_OPERAND (arg0, 0);
6116 tree t0 = TREE_TYPE (t);
6117 tree t1 = TREE_TYPE (arg00);
6118 tree tt0 = TREE_TYPE (t0);
6119 tree tt1 = TREE_TYPE (t1);
6120 tree s0 = TYPE_SIZE (tt0);
6121 tree s1 = TYPE_SIZE (tt1);
6123 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6124 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6125 TREE_OPERAND (arg0, 1));
6128 tem = fold_convert_const (code, type, arg0);
6129 return tem ? tem : t;
6131 case VIEW_CONVERT_EXPR:
6132 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6133 return build1 (VIEW_CONVERT_EXPR, type,
6134 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6135 return t;
6137 case COMPONENT_REF:
6138 if (TREE_CODE (arg0) == CONSTRUCTOR
6139 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6141 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6142 if (m)
6143 return TREE_VALUE (m);
6145 return t;
6147 case RANGE_EXPR:
6148 if (TREE_CONSTANT (t) != wins)
6150 tem = copy_node (t);
6151 TREE_CONSTANT (tem) = wins;
6152 TREE_INVARIANT (tem) = wins;
6153 return tem;
6155 return t;
6157 case NEGATE_EXPR:
6158 if (negate_expr_p (arg0))
6159 return fold_convert (type, negate_expr (arg0));
6160 return t;
6162 case ABS_EXPR:
6163 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6164 return fold_abs_const (arg0, type);
6165 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6166 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6167 /* Convert fabs((double)float) into (double)fabsf(float). */
6168 else if (TREE_CODE (arg0) == NOP_EXPR
6169 && TREE_CODE (type) == REAL_TYPE)
6171 tree targ0 = strip_float_extensions (arg0);
6172 if (targ0 != arg0)
6173 return fold_convert (type, fold (build1 (ABS_EXPR,
6174 TREE_TYPE (targ0),
6175 targ0)));
6177 else if (tree_expr_nonnegative_p (arg0))
6178 return arg0;
6179 return t;
6181 case CONJ_EXPR:
6182 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6183 return fold_convert (type, arg0);
6184 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6185 return build2 (COMPLEX_EXPR, type,
6186 TREE_OPERAND (arg0, 0),
6187 negate_expr (TREE_OPERAND (arg0, 1)));
6188 else if (TREE_CODE (arg0) == COMPLEX_CST)
6189 return build_complex (type, TREE_REALPART (arg0),
6190 negate_expr (TREE_IMAGPART (arg0)));
6191 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6192 return fold (build2 (TREE_CODE (arg0), type,
6193 fold (build1 (CONJ_EXPR, type,
6194 TREE_OPERAND (arg0, 0))),
6195 fold (build1 (CONJ_EXPR, type,
6196 TREE_OPERAND (arg0, 1)))));
6197 else if (TREE_CODE (arg0) == CONJ_EXPR)
6198 return TREE_OPERAND (arg0, 0);
6199 return t;
6201 case BIT_NOT_EXPR:
6202 if (TREE_CODE (arg0) == INTEGER_CST)
6203 return fold_not_const (arg0, type);
6204 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6205 return TREE_OPERAND (arg0, 0);
6206 return t;
6208 case PLUS_EXPR:
6209 /* A + (-B) -> A - B */
6210 if (TREE_CODE (arg1) == NEGATE_EXPR)
6211 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6212 /* (-A) + B -> B - A */
6213 if (TREE_CODE (arg0) == NEGATE_EXPR
6214 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6215 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6216 if (! FLOAT_TYPE_P (type))
6218 if (integer_zerop (arg1))
6219 return non_lvalue (fold_convert (type, arg0));
6221 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6222 with a constant, and the two constants have no bits in common,
6223 we should treat this as a BIT_IOR_EXPR since this may produce more
6224 simplifications. */
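/* For example, (x & 0xF0) + (y & 0x0F) can produce no carries because
   the two masks share no bits, so it is equivalent to
   (x & 0xF0) | (y & 0x0F).  */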
6225 if (TREE_CODE (arg0) == BIT_AND_EXPR
6226 && TREE_CODE (arg1) == BIT_AND_EXPR
6227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6228 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6229 && integer_zerop (const_binop (BIT_AND_EXPR,
6230 TREE_OPERAND (arg0, 1),
6231 TREE_OPERAND (arg1, 1), 0)))
6233 code = BIT_IOR_EXPR;
6234 goto bit_ior;
6237 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6238 (plus (plus (mult) (mult)) (foo)) so that we can
6239 take advantage of the factoring cases below. */
6240 if ((TREE_CODE (arg0) == PLUS_EXPR
6241 && TREE_CODE (arg1) == MULT_EXPR)
6242 || (TREE_CODE (arg1) == PLUS_EXPR
6243 && TREE_CODE (arg0) == MULT_EXPR))
6245 tree parg0, parg1, parg, marg;
6247 if (TREE_CODE (arg0) == PLUS_EXPR)
6248 parg = arg0, marg = arg1;
6249 else
6250 parg = arg1, marg = arg0;
6251 parg0 = TREE_OPERAND (parg, 0);
6252 parg1 = TREE_OPERAND (parg, 1);
6253 STRIP_NOPS (parg0);
6254 STRIP_NOPS (parg1);
6256 if (TREE_CODE (parg0) == MULT_EXPR
6257 && TREE_CODE (parg1) != MULT_EXPR)
6258 return fold (build2 (PLUS_EXPR, type,
6259 fold (build2 (PLUS_EXPR, type,
6260 fold_convert (type, parg0),
6261 fold_convert (type, marg))),
6262 fold_convert (type, parg1)));
6263 if (TREE_CODE (parg0) != MULT_EXPR
6264 && TREE_CODE (parg1) == MULT_EXPR)
6265 return fold (build2 (PLUS_EXPR, type,
6266 fold (build2 (PLUS_EXPR, type,
6267 fold_convert (type, parg1),
6268 fold_convert (type, marg))),
6269 fold_convert (type, parg0)));
6272 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6274 tree arg00, arg01, arg10, arg11;
6275 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6277 /* (A * C) + (B * C) -> (A+B) * C.
6278 We are most concerned about the case where C is a constant,
6279 but other combinations show up during loop reduction. Since
6280 it is not difficult, try all four possibilities. */
6282 arg00 = TREE_OPERAND (arg0, 0);
6283 arg01 = TREE_OPERAND (arg0, 1);
6284 arg10 = TREE_OPERAND (arg1, 0);
6285 arg11 = TREE_OPERAND (arg1, 1);
6286 same = NULL_TREE;
6288 if (operand_equal_p (arg01, arg11, 0))
6289 same = arg01, alt0 = arg00, alt1 = arg10;
6290 else if (operand_equal_p (arg00, arg10, 0))
6291 same = arg00, alt0 = arg01, alt1 = arg11;
6292 else if (operand_equal_p (arg00, arg11, 0))
6293 same = arg00, alt0 = arg01, alt1 = arg10;
6294 else if (operand_equal_p (arg01, arg10, 0))
6295 same = arg01, alt0 = arg00, alt1 = arg11;
6297 /* No identical multiplicands; see if we can find a common
6298 power-of-two factor in non-power-of-two multiplies. This
6299 can help in multi-dimensional array access. */
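/* For example, i*12 + j*4 becomes (i*3 + j) * 4, as arises when
   indexing rows of 12 bytes that hold 4-byte elements.  */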
6300 else if (TREE_CODE (arg01) == INTEGER_CST
6301 && TREE_CODE (arg11) == INTEGER_CST
6302 && TREE_INT_CST_HIGH (arg01) == 0
6303 && TREE_INT_CST_HIGH (arg11) == 0)
6305 HOST_WIDE_INT int01, int11, tmp;
6306 int01 = TREE_INT_CST_LOW (arg01);
6307 int11 = TREE_INT_CST_LOW (arg11);
6309 /* Move min of absolute values to int11. */
6310 if ((int01 >= 0 ? int01 : -int01)
6311 < (int11 >= 0 ? int11 : -int11))
6313 tmp = int01, int01 = int11, int11 = tmp;
6314 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6315 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6318 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6320 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6321 build_int_2 (int01 / int11, 0)));
6322 alt1 = arg10;
6323 same = arg11;
6327 if (same)
6328 return fold (build2 (MULT_EXPR, type,
6329 fold (build2 (PLUS_EXPR, type,
6330 alt0, alt1)),
6331 same));
6334 else
6336 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6337 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6338 return non_lvalue (fold_convert (type, arg0));
6340 /* Likewise if the operands are reversed. */
6341 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6342 return non_lvalue (fold_convert (type, arg1));
6344 /* Convert x+x into x*2.0. */
6345 if (operand_equal_p (arg0, arg1, 0)
6346 && SCALAR_FLOAT_TYPE_P (type))
6347 return fold (build2 (MULT_EXPR, type, arg0,
6348 build_real (type, dconst2)));
6350 /* Convert x*c+x into x*(c+1). */
6351 if (flag_unsafe_math_optimizations
6352 && TREE_CODE (arg0) == MULT_EXPR
6353 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6354 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6355 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6357 REAL_VALUE_TYPE c;
6359 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6360 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6361 return fold (build2 (MULT_EXPR, type, arg1,
6362 build_real (type, c)));
6365 /* Convert x+x*c into x*(c+1). */
6366 if (flag_unsafe_math_optimizations
6367 && TREE_CODE (arg1) == MULT_EXPR
6368 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6369 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6370 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6372 REAL_VALUE_TYPE c;
6374 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6375 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6376 return fold (build2 (MULT_EXPR, type, arg0,
6377 build_real (type, c)));
6380 /* Convert x*c1+x*c2 into x*(c1+c2). */
6381 if (flag_unsafe_math_optimizations
6382 && TREE_CODE (arg0) == MULT_EXPR
6383 && TREE_CODE (arg1) == MULT_EXPR
6384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6385 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6386 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6387 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6388 && operand_equal_p (TREE_OPERAND (arg0, 0),
6389 TREE_OPERAND (arg1, 0), 0))
6391 REAL_VALUE_TYPE c1, c2;
6393 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6394 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6395 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6396 return fold (build2 (MULT_EXPR, type,
6397 TREE_OPERAND (arg0, 0),
6398 build_real (type, c1)));
6400 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6401 if (flag_unsafe_math_optimizations
6402 && TREE_CODE (arg1) == PLUS_EXPR
6403 && TREE_CODE (arg0) != MULT_EXPR)
6405 tree tree10 = TREE_OPERAND (arg1, 0);
6406 tree tree11 = TREE_OPERAND (arg1, 1);
6407 if (TREE_CODE (tree11) == MULT_EXPR
6408 && TREE_CODE (tree10) == MULT_EXPR)
6410 tree tree0;
6411 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6412 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6415 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6416 if (flag_unsafe_math_optimizations
6417 && TREE_CODE (arg0) == PLUS_EXPR
6418 && TREE_CODE (arg1) != MULT_EXPR)
6420 tree tree00 = TREE_OPERAND (arg0, 0);
6421 tree tree01 = TREE_OPERAND (arg0, 1);
6422 if (TREE_CODE (tree01) == MULT_EXPR
6423 && TREE_CODE (tree00) == MULT_EXPR)
6425 tree tree0;
6426 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6427 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6432 bit_rotate:
6433 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6434 is a rotate of A by C1 bits. */
6435 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6436 is a rotate of A by B bits. */
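/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) combines
   two shifted copies of A whose bits cannot overlap, which is exactly
   A rotated left by 3; likewise (A << B) + (A >> (32 - B)) is a rotate
   by B bits.  */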
6438 enum tree_code code0, code1;
6439 code0 = TREE_CODE (arg0);
6440 code1 = TREE_CODE (arg1);
6441 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6442 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6443 && operand_equal_p (TREE_OPERAND (arg0, 0),
6444 TREE_OPERAND (arg1, 0), 0)
6445 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6447 tree tree01, tree11;
6448 enum tree_code code01, code11;
6450 tree01 = TREE_OPERAND (arg0, 1);
6451 tree11 = TREE_OPERAND (arg1, 1);
6452 STRIP_NOPS (tree01);
6453 STRIP_NOPS (tree11);
6454 code01 = TREE_CODE (tree01);
6455 code11 = TREE_CODE (tree11);
6456 if (code01 == INTEGER_CST
6457 && code11 == INTEGER_CST
6458 && TREE_INT_CST_HIGH (tree01) == 0
6459 && TREE_INT_CST_HIGH (tree11) == 0
6460 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6461 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6462 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6463 code0 == LSHIFT_EXPR ? tree01 : tree11);
6464 else if (code11 == MINUS_EXPR)
6466 tree tree110, tree111;
6467 tree110 = TREE_OPERAND (tree11, 0);
6468 tree111 = TREE_OPERAND (tree11, 1);
6469 STRIP_NOPS (tree110);
6470 STRIP_NOPS (tree111);
6471 if (TREE_CODE (tree110) == INTEGER_CST
6472 && 0 == compare_tree_int (tree110,
6473 TYPE_PRECISION
6474 (TREE_TYPE (TREE_OPERAND
6475 (arg0, 0))))
6476 && operand_equal_p (tree01, tree111, 0))
6477 return build2 ((code0 == LSHIFT_EXPR
6478 ? LROTATE_EXPR
6479 : RROTATE_EXPR),
6480 type, TREE_OPERAND (arg0, 0), tree01);
6482 else if (code01 == MINUS_EXPR)
6484 tree tree010, tree011;
6485 tree010 = TREE_OPERAND (tree01, 0);
6486 tree011 = TREE_OPERAND (tree01, 1);
6487 STRIP_NOPS (tree010);
6488 STRIP_NOPS (tree011);
6489 if (TREE_CODE (tree010) == INTEGER_CST
6490 && 0 == compare_tree_int (tree010,
6491 TYPE_PRECISION
6492 (TREE_TYPE (TREE_OPERAND
6493 (arg0, 0))))
6494 && operand_equal_p (tree11, tree011, 0))
6495 return build2 ((code0 != LSHIFT_EXPR
6496 ? LROTATE_EXPR
6497 : RROTATE_EXPR),
6498 type, TREE_OPERAND (arg0, 0), tree11);
6503 associate:
6504 /* In most languages, we can't associate operations on floats through
6505 parentheses. Rather than remember where the parentheses were, we
6506 don't associate floats at all, unless the user has specified
6507 -funsafe-math-optimizations. */
6509 if (! wins
6510 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6512 tree var0, con0, lit0, minus_lit0;
6513 tree var1, con1, lit1, minus_lit1;
6515 /* Split both trees into variables, constants, and literals. Then
6516 associate each group together, the constants with literals,
6517 then the result with variables. This increases the chances of
6518 literals being recombined later and of generating relocatable
6519 expressions for the sum of a constant and literal. */
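/* As an illustration: folding (x + 3) - (y + 1) splits the operands
   into the variables x, y and the literals 3, 1; reassociation then
   recombines the literals, producing (x - y) + 2.  */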
6520 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6521 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6522 code == MINUS_EXPR);
6524 /* Only do something if we found more than two objects. Otherwise,
6525 nothing has changed and we risk infinite recursion. */
6526 if (2 < ((var0 != 0) + (var1 != 0)
6527 + (con0 != 0) + (con1 != 0)
6528 + (lit0 != 0) + (lit1 != 0)
6529 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6531 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6532 if (code == MINUS_EXPR)
6533 code = PLUS_EXPR;
6535 var0 = associate_trees (var0, var1, code, type);
6536 con0 = associate_trees (con0, con1, code, type);
6537 lit0 = associate_trees (lit0, lit1, code, type);
6538 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6540 /* Preserve the MINUS_EXPR if the negative part of the literal is
6541 greater than the positive part. Otherwise, the multiplicative
6542 folding code (i.e. extract_muldiv) may be fooled when
6543 unsigned constants are subtracted, as in the following
6544 example: ((X*2 + 4) - 8U)/2. */
6545 if (minus_lit0 && lit0)
6547 if (TREE_CODE (lit0) == INTEGER_CST
6548 && TREE_CODE (minus_lit0) == INTEGER_CST
6549 && tree_int_cst_lt (lit0, minus_lit0))
6551 minus_lit0 = associate_trees (minus_lit0, lit0,
6552 MINUS_EXPR, type);
6553 lit0 = 0;
6555 else
6557 lit0 = associate_trees (lit0, minus_lit0,
6558 MINUS_EXPR, type);
6559 minus_lit0 = 0;
6562 if (minus_lit0)
6564 if (con0 == 0)
6565 return fold_convert (type,
6566 associate_trees (var0, minus_lit0,
6567 MINUS_EXPR, type));
6568 else
6570 con0 = associate_trees (con0, minus_lit0,
6571 MINUS_EXPR, type);
6572 return fold_convert (type,
6573 associate_trees (var0, con0,
6574 PLUS_EXPR, type));
6578 con0 = associate_trees (con0, lit0, code, type);
6579 return fold_convert (type, associate_trees (var0, con0,
6580 code, type));
6584 binary:
6585 if (wins)
6586 t1 = const_binop (code, arg0, arg1, 0);
6587 if (t1 != NULL_TREE)
6589 /* The return value should always have
6590 the same type as the original expression. */
6591 if (TREE_TYPE (t1) != type)
6592 t1 = fold_convert (type, t1);
6594 return t1;
6596 return t;
6598 case MINUS_EXPR:
6599 /* A - (-B) -> A + B */
6600 if (TREE_CODE (arg1) == NEGATE_EXPR)
6601 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6602 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6603 if (TREE_CODE (arg0) == NEGATE_EXPR
6604 && (FLOAT_TYPE_P (type)
6605 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6606 && negate_expr_p (arg1)
6607 && reorder_operands_p (arg0, arg1))
6608 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6609 TREE_OPERAND (arg0, 0)));
6611 if (! FLOAT_TYPE_P (type))
6613 if (! wins && integer_zerop (arg0))
6614 return negate_expr (fold_convert (type, arg1));
6615 if (integer_zerop (arg1))
6616 return non_lvalue (fold_convert (type, arg0));
6618 /* Fold A - (A & B) into ~B & A. */
6619 if (!TREE_SIDE_EFFECTS (arg0)
6620 && TREE_CODE (arg1) == BIT_AND_EXPR)
6622 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6623 return fold (build2 (BIT_AND_EXPR, type,
6624 fold (build1 (BIT_NOT_EXPR, type,
6625 TREE_OPERAND (arg1, 0))),
6626 arg0));
6627 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6628 return fold (build2 (BIT_AND_EXPR, type,
6629 fold (build1 (BIT_NOT_EXPR, type,
6630 TREE_OPERAND (arg1, 1))),
6631 arg0));
6634 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6635 any power of 2 minus 1. */
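/* For example, with B == 3: (A & ~3) - (A & 3) and (A ^ 3) - 3 both
   equal A - 2*(A & 3), since XOR with the mask sets exactly the low
   bits that were clear and clears those that were set.  */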
6636 if (TREE_CODE (arg0) == BIT_AND_EXPR
6637 && TREE_CODE (arg1) == BIT_AND_EXPR
6638 && operand_equal_p (TREE_OPERAND (arg0, 0),
6639 TREE_OPERAND (arg1, 0), 0))
6641 tree mask0 = TREE_OPERAND (arg0, 1);
6642 tree mask1 = TREE_OPERAND (arg1, 1);
6643 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6645 if (operand_equal_p (tem, mask1, 0))
6647 tem = fold (build2 (BIT_XOR_EXPR, type,
6648 TREE_OPERAND (arg0, 0), mask1));
6649 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6654 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6655 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6656 return non_lvalue (fold_convert (type, arg0));
6658 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6659 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6660 (-ARG1 + ARG0) reduces to -ARG1. */
6661 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6662 return negate_expr (fold_convert (type, arg1));
6664 /* Fold &x - &x. This can happen from &x.foo - &x.
6665 This is unsafe for certain floats even in non-IEEE formats.
6666 In IEEE, it is unsafe because it gives the wrong result for NaNs.
6667 Also note that operand_equal_p is always false if an operand
6668 is volatile. */
6670 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6671 && operand_equal_p (arg0, arg1, 0))
6672 return fold_convert (type, integer_zero_node);
6674 /* A - B -> A + (-B) if B is easily negatable. */
6675 if (!wins && negate_expr_p (arg1)
6676 && (FLOAT_TYPE_P (type)
6677 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6678 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6680 if (TREE_CODE (arg0) == MULT_EXPR
6681 && TREE_CODE (arg1) == MULT_EXPR
6682 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6684 /* (A * C) - (B * C) -> (A-B) * C. */
6685 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6686 TREE_OPERAND (arg1, 1), 0))
6687 return fold (build2 (MULT_EXPR, type,
6688 fold (build2 (MINUS_EXPR, type,
6689 TREE_OPERAND (arg0, 0),
6690 TREE_OPERAND (arg1, 0))),
6691 TREE_OPERAND (arg0, 1)));
6692 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6693 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6694 TREE_OPERAND (arg1, 0), 0))
6695 return fold (build2 (MULT_EXPR, type,
6696 TREE_OPERAND (arg0, 0),
6697 fold (build2 (MINUS_EXPR, type,
6698 TREE_OPERAND (arg0, 1),
6699 TREE_OPERAND (arg1, 1)))));
6702 goto associate;
6704 case MULT_EXPR:
6705 /* (-A) * (-B) -> A * B */
6706 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6707 return fold (build2 (MULT_EXPR, type,
6708 TREE_OPERAND (arg0, 0),
6709 negate_expr (arg1)));
6710 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6711 return fold (build2 (MULT_EXPR, type,
6712 negate_expr (arg0),
6713 TREE_OPERAND (arg1, 0)));
6715 if (! FLOAT_TYPE_P (type))
6717 if (integer_zerop (arg1))
6718 return omit_one_operand (type, arg1, arg0);
6719 if (integer_onep (arg1))
6720 return non_lvalue (fold_convert (type, arg0));
6722 /* (a * (1 << b)) is (a << b) */
6723 if (TREE_CODE (arg1) == LSHIFT_EXPR
6724 && integer_onep (TREE_OPERAND (arg1, 0)))
6725 return fold (build2 (LSHIFT_EXPR, type, arg0,
6726 TREE_OPERAND (arg1, 1)));
6727 if (TREE_CODE (arg0) == LSHIFT_EXPR
6728 && integer_onep (TREE_OPERAND (arg0, 0)))
6729 return fold (build2 (LSHIFT_EXPR, type, arg1,
6730 TREE_OPERAND (arg0, 1)));
6732 if (TREE_CODE (arg1) == INTEGER_CST
6733 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6734 fold_convert (type, arg1),
6735 code, NULL_TREE)))
6736 return fold_convert (type, tem);
6739 else
6741 /* Maybe fold x * 0 to 0. The expressions aren't the same
6742 when x is NaN, since x * 0 is also NaN. Nor are they the
6743 same in modes with signed zeros, since multiplying a
6744 negative value by 0 gives -0, not +0. */
6745 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6746 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6747 && real_zerop (arg1))
6748 return omit_one_operand (type, arg1, arg0);
6749 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6750 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6751 && real_onep (arg1))
6752 return non_lvalue (fold_convert (type, arg0));
6754 /* Transform x * -1.0 into -x. */
6755 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6756 && real_minus_onep (arg1))
6757 return fold_convert (type, negate_expr (arg0));
6759 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6760 if (flag_unsafe_math_optimizations
6761 && TREE_CODE (arg0) == RDIV_EXPR
6762 && TREE_CODE (arg1) == REAL_CST
6763 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6765 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6766 arg1, 0);
6767 if (tem)
6768 return fold (build2 (RDIV_EXPR, type, tem,
6769 TREE_OPERAND (arg0, 1)));
6772 if (flag_unsafe_math_optimizations)
6774 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6775 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6777 /* Optimizations of root(...)*root(...). */
6778 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6780 tree rootfn, arg, arglist;
6781 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6782 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6784 /* Optimize sqrt(x)*sqrt(x) as x. */
6785 if (BUILTIN_SQRT_P (fcode0)
6786 && operand_equal_p (arg00, arg10, 0)
6787 && ! HONOR_SNANS (TYPE_MODE (type)))
6788 return arg00;
6790 /* Optimize root(x)*root(y) as root(x*y). */
6791 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6792 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
6793 arglist = build_tree_list (NULL_TREE, arg);
6794 return build_function_call_expr (rootfn, arglist);
6797 /* Optimize expN(x)*expN(y) as expN(x+y). */
6798 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6800 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6801 tree arg = build2 (PLUS_EXPR, type,
6802 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6803 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6804 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6805 return build_function_call_expr (expfn, arglist);
6808 /* Optimizations of pow(...)*pow(...). */
6809 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6810 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6811 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6813 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6814 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6815 1)));
6816 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6817 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6818 1)));
6820 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6821 if (operand_equal_p (arg01, arg11, 0))
6823 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6824 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
6825 tree arglist = tree_cons (NULL_TREE, fold (arg),
6826 build_tree_list (NULL_TREE,
6827 arg01));
6828 return build_function_call_expr (powfn, arglist);
6831 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6832 if (operand_equal_p (arg00, arg10, 0))
6834 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6835 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
6836 tree arglist = tree_cons (NULL_TREE, arg00,
6837 build_tree_list (NULL_TREE,
6838 arg));
6839 return build_function_call_expr (powfn, arglist);
6843 /* Optimize tan(x)*cos(x) as sin(x). */
6844 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6845 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6846 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6847 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6848 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6849 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6850 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6851 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6853 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6855 if (sinfn != NULL_TREE)
6856 return build_function_call_expr (sinfn,
6857 TREE_OPERAND (arg0, 1));
6860 /* Optimize x*pow(x,c) as pow(x,c+1). */
6861 if (fcode1 == BUILT_IN_POW
6862 || fcode1 == BUILT_IN_POWF
6863 || fcode1 == BUILT_IN_POWL)
6865 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6866 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6867 1)));
6868 if (TREE_CODE (arg11) == REAL_CST
6869 && ! TREE_CONSTANT_OVERFLOW (arg11)
6870 && operand_equal_p (arg0, arg10, 0))
6872 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6873 REAL_VALUE_TYPE c;
6874 tree arg, arglist;
6876 c = TREE_REAL_CST (arg11);
6877 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6878 arg = build_real (type, c);
6879 arglist = build_tree_list (NULL_TREE, arg);
6880 arglist = tree_cons (NULL_TREE, arg0, arglist);
6881 return build_function_call_expr (powfn, arglist);
6885 /* Optimize pow(x,c)*x as pow(x,c+1). */
6886 if (fcode0 == BUILT_IN_POW
6887 || fcode0 == BUILT_IN_POWF
6888 || fcode0 == BUILT_IN_POWL)
6890 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6891 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6892 1)));
6893 if (TREE_CODE (arg01) == REAL_CST
6894 && ! TREE_CONSTANT_OVERFLOW (arg01)
6895 && operand_equal_p (arg1, arg00, 0))
6897 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6898 REAL_VALUE_TYPE c;
6899 tree arg, arglist;
6901 c = TREE_REAL_CST (arg01);
6902 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6903 arg = build_real (type, c);
6904 arglist = build_tree_list (NULL_TREE, arg);
6905 arglist = tree_cons (NULL_TREE, arg1, arglist);
6906 return build_function_call_expr (powfn, arglist);
6910 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6911 if (! optimize_size
6912 && operand_equal_p (arg0, arg1, 0))
6914 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6916 if (powfn)
6918 tree arg = build_real (type, dconst2);
6919 tree arglist = build_tree_list (NULL_TREE, arg);
6920 arglist = tree_cons (NULL_TREE, arg0, arglist);
6921 return build_function_call_expr (powfn, arglist);
6926 goto associate;
6928 case BIT_IOR_EXPR:
6929 bit_ior:
6930 if (integer_all_onesp (arg1))
6931 return omit_one_operand (type, arg1, arg0);
6932 if (integer_zerop (arg1))
6933 return non_lvalue (fold_convert (type, arg0));
6934 if (operand_equal_p (arg0, arg1, 0))
6935 return non_lvalue (fold_convert (type, arg0));
6936 t1 = distribute_bit_expr (code, type, arg0, arg1);
6937 if (t1 != NULL_TREE)
6938 return t1;
6940 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6942 This results in more efficient code for machines without a NAND
6943 instruction. Combine will canonicalize to the first form
6944 which will allow use of NAND instructions provided by the
6945 backend if they exist. */
6946 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6947 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6949 return fold (build1 (BIT_NOT_EXPR, type,
6950 build2 (BIT_AND_EXPR, type,
6951 TREE_OPERAND (arg0, 0),
6952 TREE_OPERAND (arg1, 0))));
6955 /* See if this can be simplified into a rotate first. If that
6956 is unsuccessful continue in the association code. */
6957 goto bit_rotate;
6959 case BIT_XOR_EXPR:
6960 if (integer_zerop (arg1))
6961 return non_lvalue (fold_convert (type, arg0));
6962 if (integer_all_onesp (arg1))
6963 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6964 if (operand_equal_p (arg0, arg1, 0))
6965 return omit_one_operand (type, integer_zero_node, arg0);
6967 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6968 with a constant, and the two constants have no bits in common,
6969 we should treat this as a BIT_IOR_EXPR since this may produce more
6970 simplifications. */
6971 if (TREE_CODE (arg0) == BIT_AND_EXPR
6972 && TREE_CODE (arg1) == BIT_AND_EXPR
6973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6974 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6975 && integer_zerop (const_binop (BIT_AND_EXPR,
6976 TREE_OPERAND (arg0, 1),
6977 TREE_OPERAND (arg1, 1), 0)))
6979 code = BIT_IOR_EXPR;
6980 goto bit_ior;
6983 /* See if this can be simplified into a rotate first. If that
6984 is unsuccessful continue in the association code. */
6985 goto bit_rotate;
6987 case BIT_AND_EXPR:
6988 if (integer_all_onesp (arg1))
6989 return non_lvalue (fold_convert (type, arg0));
6990 if (integer_zerop (arg1))
6991 return omit_one_operand (type, arg1, arg0);
6992 if (operand_equal_p (arg0, arg1, 0))
6993 return non_lvalue (fold_convert (type, arg0));
6994 t1 = distribute_bit_expr (code, type, arg0, arg1);
6995 if (t1 != NULL_TREE)
6996 return t1;
6997 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6998 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6999 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7001 unsigned int prec
7002 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7004 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7005 && (~TREE_INT_CST_LOW (arg1)
7006 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7007 return fold_convert (type, TREE_OPERAND (arg0, 0));
7010 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7012 This results in more efficient code for machines without a NOR
7013 instruction. Combine will canonicalize to the first form
7014 which will allow use of NOR instructions provided by the
7015 backend if they exist. */
7016 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7017 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7019 return fold (build1 (BIT_NOT_EXPR, type,
7020 build2 (BIT_IOR_EXPR, type,
7021 TREE_OPERAND (arg0, 0),
7022 TREE_OPERAND (arg1, 0))));
7025 goto associate;
7027 case RDIV_EXPR:
7028 /* Don't touch a floating-point divide by zero unless the mode
7029 of the constant can represent infinity. */
7030 if (TREE_CODE (arg1) == REAL_CST
7031 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7032 && real_zerop (arg1))
7033 return t;
7035 /* (-A) / (-B) -> A / B */
7036 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7037 return fold (build2 (RDIV_EXPR, type,
7038 TREE_OPERAND (arg0, 0),
7039 negate_expr (arg1)));
7040 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7041 return fold (build2 (RDIV_EXPR, type,
7042 negate_expr (arg0),
7043 TREE_OPERAND (arg1, 0)));
7045 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7046 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7047 && real_onep (arg1))
7048 return non_lvalue (fold_convert (type, arg0));
7050 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7051 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7052 && real_minus_onep (arg1))
7053 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7055 /* If ARG1 is a constant, we can convert this to a multiply by the
7056 reciprocal. This does not have the same rounding properties,
7057 so only do this if -funsafe-math-optimizations. We can actually
7058 always safely do it if ARG1 is a power of two, but it's hard to
7059 tell if it is or not in a portable manner. */
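/* For example, x / 2.0 becomes x * 0.5 via the exact-inverse path
   below when optimizing, even without -funsafe-math-optimizations,
   because 0.5 is exactly representable and the product rounds exactly
   as the quotient does.  */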
7060 if (TREE_CODE (arg1) == REAL_CST)
7062 if (flag_unsafe_math_optimizations
7063 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7064 arg1, 0)))
7065 return fold (build2 (MULT_EXPR, type, arg0, tem));
7066 /* Find the reciprocal if optimizing and the result is exact. */
7067 if (optimize)
7069 REAL_VALUE_TYPE r;
7070 r = TREE_REAL_CST (arg1);
7071 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
7073 tem = build_real (type, r);
7074 return fold (build2 (MULT_EXPR, type, arg0, tem));
7078 /* Convert A/B/C to A/(B*C). */
7079 if (flag_unsafe_math_optimizations
7080 && TREE_CODE (arg0) == RDIV_EXPR)
7081 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7082 fold (build2 (MULT_EXPR, type,
7083 TREE_OPERAND (arg0, 1), arg1))));
7085 /* Convert A/(B/C) to (A/B)*C. */
7086 if (flag_unsafe_math_optimizations
7087 && TREE_CODE (arg1) == RDIV_EXPR)
7088 return fold (build2 (MULT_EXPR, type,
7089 fold (build2 (RDIV_EXPR, type, arg0,
7090 TREE_OPERAND (arg1, 0))),
7091 TREE_OPERAND (arg1, 1)));
7093 /* Convert C1/(X*C2) into (C1/C2)/X. */
7094 if (flag_unsafe_math_optimizations
7095 && TREE_CODE (arg1) == MULT_EXPR
7096 && TREE_CODE (arg0) == REAL_CST
7097 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7099 tree tem = const_binop (RDIV_EXPR, arg0,
7100 TREE_OPERAND (arg1, 1), 0);
7101 if (tem)
7102 return fold (build2 (RDIV_EXPR, type, tem,
7103 TREE_OPERAND (arg1, 0)));
7106 if (flag_unsafe_math_optimizations)
7108 enum built_in_function fcode = builtin_mathfn_code (arg1);
7109 /* Optimize x/expN(y) into x*expN(-y). */
7110 if (BUILTIN_EXPONENT_P (fcode))
7112 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7113 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7114 tree arglist = build_tree_list (NULL_TREE,
7115 fold_convert (type, arg));
7116 arg1 = build_function_call_expr (expfn, arglist);
7117 return fold (build2 (MULT_EXPR, type, arg0, arg1));
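/* For example, x / exp (y) becomes x * exp (-y): the division is
   replaced by a cheap negation of the exponent argument.  */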
7120 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7121 if (fcode == BUILT_IN_POW
7122 || fcode == BUILT_IN_POWF
7123 || fcode == BUILT_IN_POWL)
7125 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7126 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7127 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7128 tree neg11 = fold_convert (type, negate_expr (arg11));
7129 tree arglist = tree_cons (NULL_TREE, arg10,
7130 build_tree_list (NULL_TREE, neg11));
7131 arg1 = build_function_call_expr (powfn, arglist);
7132 return fold (build2 (MULT_EXPR, type, arg0, arg1));
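/* Similarly, x / pow (y, z) becomes x * pow (y, -z), folding the
   division into the sign of the exponent.  */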
7136 if (flag_unsafe_math_optimizations)
7138 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7139 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7141 /* Optimize sin(x)/cos(x) as tan(x). */
7142 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7143 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7144 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7145 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7146 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7148 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7150 if (tanfn != NULL_TREE)
7151 return build_function_call_expr (tanfn,
7152 TREE_OPERAND (arg0, 1));
7155 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7156 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7157 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7158 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7159 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7160 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7162 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7164 if (tanfn != NULL_TREE)
7166 tree tmp = TREE_OPERAND (arg0, 1);
7167 tmp = build_function_call_expr (tanfn, tmp);
7168 return fold (build2 (RDIV_EXPR, type,
7169 build_real (type, dconst1), tmp));
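/* For example, sin (x) / cos (x) folds to tan (x) and
   cos (x) / sin (x) to 1.0 / tan (x); mathfn_built_in picks the
   tanf or tanl variant when the type is float or long double.  */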
7173 /* Optimize pow(x,c)/x as pow(x,c-1). */
7174 if (fcode0 == BUILT_IN_POW
7175 || fcode0 == BUILT_IN_POWF
7176 || fcode0 == BUILT_IN_POWL)
7178 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7179 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7180 if (TREE_CODE (arg01) == REAL_CST
7181 && ! TREE_CONSTANT_OVERFLOW (arg01)
7182 && operand_equal_p (arg1, arg00, 0))
7184 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7185 REAL_VALUE_TYPE c;
7186 tree arg, arglist;
7188 c = TREE_REAL_CST (arg01);
7189 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7190 arg = build_real (type, c);
7191 arglist = build_tree_list (NULL_TREE, arg);
7192 arglist = tree_cons (NULL_TREE, arg1, arglist);
7193 return build_function_call_expr (powfn, arglist);
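/* For example, pow (x, 4.0) / x folds to pow (x, 3.0): the
   constant exponent is decremented rather than emitting a
   division by the base.  */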
7197 goto binary;
7199 case TRUNC_DIV_EXPR:
7200 case ROUND_DIV_EXPR:
7201 case FLOOR_DIV_EXPR:
7202 case CEIL_DIV_EXPR:
7203 case EXACT_DIV_EXPR:
7204 if (integer_onep (arg1))
7205 return non_lvalue (fold_convert (type, arg0));
7206 if (integer_zerop (arg1))
7207 return t;
7208 /* X / -1 is -X. */
7209 if (!TYPE_UNSIGNED (type)
7210 && TREE_CODE (arg1) == INTEGER_CST
7211 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7212 && TREE_INT_CST_HIGH (arg1) == -1)
7213 return fold_convert (type, negate_expr (arg0));
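/* For example, for signed int x, x / -1 folds to -x; both words
   of the constant are checked so that the double-word value
   really is minus one.  */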
7215 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7216 operation, EXACT_DIV_EXPR.
7218 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7219 At one time the others generated faster code, but it's not clear
7220 whether they still do after the last round of changes to the DIV code in expmed.c. */
7221 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7222 && multiple_of_p (type, arg0, arg1))
7223 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
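/* For example, if arg0 is x * 4 and arg1 is 4, multiple_of_p
   proves the division exact, so a FLOOR_DIV_EXPR or CEIL_DIV_EXPR
   is rewritten as EXACT_DIV_EXPR, which expands more cheaply.  */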
7225 if (TREE_CODE (arg1) == INTEGER_CST
7226 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7227 code, NULL_TREE)))
7228 return fold_convert (type, tem);
7230 goto binary;
7232 case CEIL_MOD_EXPR:
7233 case FLOOR_MOD_EXPR:
7234 case ROUND_MOD_EXPR:
7235 case TRUNC_MOD_EXPR:
7236 if (integer_onep (arg1))
7237 return omit_one_operand (type, integer_zero_node, arg0);
7238 if (integer_zerop (arg1))
7239 return t;
7240 /* X % -1 is zero. */
7241 if (!TYPE_UNSIGNED (type)
7242 && TREE_CODE (arg1) == INTEGER_CST
7243 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7244 && TREE_INT_CST_HIGH (arg1) == -1)
7245 return omit_one_operand (type, integer_zero_node, arg0);
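/* For example, for signed int x, x % -1 folds to 0;
   omit_one_operand still evaluates x for side effects.  */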
7247 if (TREE_CODE (arg1) == INTEGER_CST
7248 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7249 code, NULL_TREE)))
7250 return fold_convert (type, tem);
7252 goto binary;
7254 case LROTATE_EXPR:
7255 case RROTATE_EXPR:
7256 if (integer_all_onesp (arg0))
7257 return omit_one_operand (type, arg0, arg1);
7258 goto shift;
7260 case RSHIFT_EXPR:
7261 /* Optimize -1 >> x for arithmetic right shifts. */
7262 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7263 return omit_one_operand (type, arg0, arg1);
7264 /* ... fall through ... */
7266 case LSHIFT_EXPR:
7267 shift:
7268 if (integer_zerop (arg1))
7269 return non_lvalue (fold_convert (type, arg0));
7270 if (integer_zerop (arg0))
7271 return omit_one_operand (type, arg0, arg1);
7273 /* Since a negative shift count is not well-defined,
7274 don't try to compute it in the compiler. */
7275 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7276 return t;
7277 /* Rewrite an LROTATE_EXPR by a constant into an
7278 RROTATE_EXPR by a new constant. */
7279 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7281 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7282 tem = fold_convert (TREE_TYPE (arg1), tem);
7283 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7284 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
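/* For example, in a 32-bit mode a left rotate by 5 becomes a
   right rotate by 27, since rotl (x, 5) == rotr (x, 32 - 5).  */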
7287 /* If we have a rotate of a bit operation with the rotate count and
7288 the second operand of the bit operation both constant,
7289 permute the two operations. */
7290 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7291 && (TREE_CODE (arg0) == BIT_AND_EXPR
7292 || TREE_CODE (arg0) == BIT_IOR_EXPR
7293 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7295 return fold (build2 (TREE_CODE (arg0), type,
7296 fold (build2 (code, type,
7297 TREE_OPERAND (arg0, 0), arg1)),
7298 fold (build2 (code, type,
7299 TREE_OPERAND (arg0, 1), arg1))));
7301 /* Two consecutive rotates adding up to the width of the mode can
7302 be ignored. */
7303 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7304 && TREE_CODE (arg0) == RROTATE_EXPR
7305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7306 && TREE_INT_CST_HIGH (arg1) == 0
7307 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7308 && ((TREE_INT_CST_LOW (arg1)
7309 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7310 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7311 return TREE_OPERAND (arg0, 0);
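/* For example, in a 32-bit mode (x rrotate 10) rrotate 22 is the
   identity, so x itself is returned.  */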
7313 goto binary;
7315 case MIN_EXPR:
7316 if (operand_equal_p (arg0, arg1, 0))
7317 return omit_one_operand (type, arg0, arg1);
7318 if (INTEGRAL_TYPE_P (type)
7319 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7320 return omit_one_operand (type, arg1, arg0);
7321 goto associate;
7323 case MAX_EXPR:
7324 if (operand_equal_p (arg0, arg1, 0))
7325 return omit_one_operand (type, arg0, arg1);
7326 if (INTEGRAL_TYPE_P (type)
7327 && TYPE_MAX_VALUE (type)
7328 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7329 return omit_one_operand (type, arg1, arg0);
7330 goto associate;
7332 case TRUTH_NOT_EXPR:
7333 /* The argument to invert_truthvalue must have Boolean type. */
7334 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7335 arg0 = fold_convert (boolean_type_node, arg0);
7337 /* Note that the operand of this must be an int
7338 and its value must be 0 or 1.
7339 ("true" is a fixed value perhaps depending on the language,
7340 but we don't handle values other than 1 correctly yet.) */
7341 tem = invert_truthvalue (arg0);
7342 /* Avoid infinite recursion. */
7343 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7345 tem = fold_single_bit_test (code, arg0, arg1, type);
7346 if (tem)
7347 return tem;
7348 return t;
7350 return fold_convert (type, tem);
7352 case TRUTH_ANDIF_EXPR:
7353 /* Note that the operands of this must be ints
7354 and their values must be 0 or 1.
7355 ("true" is a fixed value perhaps depending on the language.) */
7356 /* If first arg is constant zero, return it. */
7357 if (integer_zerop (arg0))
7358 return fold_convert (type, arg0);
7359 case TRUTH_AND_EXPR:
7360 /* If either arg is constant true, drop it. */
7361 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7362 return non_lvalue (fold_convert (type, arg1));
7363 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7364 /* Preserve sequence points. */
7365 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7366 return non_lvalue (fold_convert (type, arg0));
7367 /* If second arg is constant zero, result is zero, but first arg
7368 must be evaluated. */
7369 if (integer_zerop (arg1))
7370 return omit_one_operand (type, arg1, arg0);
7371 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7372 case will be handled here. */
7373 if (integer_zerop (arg0))
7374 return omit_one_operand (type, arg0, arg1);
7376 truth_andor:
7377 /* We only do these simplifications if we are optimizing. */
7378 if (!optimize)
7379 return t;
7381 /* Check for things like (A || B) && (A || C). We can convert this
7382 to A || (B && C). Note that either operator can be any of the four
7383 truth and/or operations and the transformation will still be
7384 valid. Also note that we only care about order for the
7385 ANDIF and ORIF operators. If B contains side effects, this
7386 might change the truth-value of A. */
7387 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7388 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7389 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7390 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7391 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7392 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7394 tree a00 = TREE_OPERAND (arg0, 0);
7395 tree a01 = TREE_OPERAND (arg0, 1);
7396 tree a10 = TREE_OPERAND (arg1, 0);
7397 tree a11 = TREE_OPERAND (arg1, 1);
7398 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7399 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7400 && (code == TRUTH_AND_EXPR
7401 || code == TRUTH_OR_EXPR));
7403 if (operand_equal_p (a00, a10, 0))
7404 return fold (build2 (TREE_CODE (arg0), type, a00,
7405 fold (build2 (code, type, a01, a11))));
7406 else if (commutative && operand_equal_p (a00, a11, 0))
7407 return fold (build2 (TREE_CODE (arg0), type, a00,
7408 fold (build2 (code, type, a01, a10))));
7409 else if (commutative && operand_equal_p (a01, a10, 0))
7410 return fold (build2 (TREE_CODE (arg0), type, a01,
7411 fold (build2 (code, type, a00, a11))));
7413 /* This case is tricky because we must either have commutative
7414 operators or else A10 must not have side-effects. */
7416 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7417 && operand_equal_p (a01, a11, 0))
7418 return fold (build2 (TREE_CODE (arg0), type,
7419 fold (build2 (code, type, a00, a10)),
7420 a01));
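/* For example, (a || b) && (a || c) folds to a || (b && c),
   evaluating a only once; the other and/or combinations factor
   the same way.  */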
7423 /* See if we can build a range comparison. */
7424 if (0 != (tem = fold_range_test (t)))
7425 return tem;
7427 /* Check for the possibility of merging component references. If our
7428 lhs is another similar operation, try to merge its rhs with our
7429 rhs. Then try to merge our lhs and rhs. */
7430 if (TREE_CODE (arg0) == code
7431 && 0 != (tem = fold_truthop (code, type,
7432 TREE_OPERAND (arg0, 1), arg1)))
7433 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7435 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7436 return tem;
7438 return t;
7440 case TRUTH_ORIF_EXPR:
7441 /* Note that the operands of this must be ints
7442 and their values must be 0 or true.
7443 ("true" is a fixed value perhaps depending on the language.) */
7444 /* If first arg is constant true, return it. */
7445 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7446 return fold_convert (type, arg0);
7447 case TRUTH_OR_EXPR:
7448 /* If either arg is constant zero, drop it. */
7449 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7450 return non_lvalue (fold_convert (type, arg1));
7451 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7452 /* Preserve sequence points. */
7453 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7454 return non_lvalue (fold_convert (type, arg0));
7455 /* If second arg is constant true, result is true, but we must
7456 evaluate first arg. */
7457 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7458 return omit_one_operand (type, arg1, arg0);
7459 /* Likewise for first arg, but note this only occurs here for
7460 TRUTH_OR_EXPR. */
7461 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7462 return omit_one_operand (type, arg0, arg1);
7463 goto truth_andor;
7465 case TRUTH_XOR_EXPR:
7466 /* If either arg is constant zero, drop it. */
7467 if (integer_zerop (arg0))
7468 return non_lvalue (fold_convert (type, arg1));
7469 if (integer_zerop (arg1))
7470 return non_lvalue (fold_convert (type, arg0));
7471 /* If either arg is constant true, this is a logical inversion. */
7472 if (integer_onep (arg0))
7473 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7474 if (integer_onep (arg1))
7475 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7476 /* Identical arguments cancel to zero. */
7477 if (operand_equal_p (arg0, arg1, 0))
7478 return omit_one_operand (type, integer_zero_node, arg0);
7479 return t;
7481 case EQ_EXPR:
7482 case NE_EXPR:
7483 case LT_EXPR:
7484 case GT_EXPR:
7485 case LE_EXPR:
7486 case GE_EXPR:
7487 /* If one arg is a real or integer constant, put it last. */
7488 if (tree_swap_operands_p (arg0, arg1, true))
7489 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7491 /* If this is an equality comparison of the address of a non-weak
7492 object against zero, then we know the result. */
7493 if ((code == EQ_EXPR || code == NE_EXPR)
7494 && TREE_CODE (arg0) == ADDR_EXPR
7495 && DECL_P (TREE_OPERAND (arg0, 0))
7496 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7497 && integer_zerop (arg1))
7498 return constant_boolean_node (code != EQ_EXPR, type);
7500 /* If this is an equality comparison of the address of two non-weak,
7501 unaliased symbols neither of which are extern (since we do not
7502 have access to attributes for externs), then we know the result. */
7503 if ((code == EQ_EXPR || code == NE_EXPR)
7504 && TREE_CODE (arg0) == ADDR_EXPR
7505 && DECL_P (TREE_OPERAND (arg0, 0))
7506 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7507 && ! lookup_attribute ("alias",
7508 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7509 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7510 && TREE_CODE (arg1) == ADDR_EXPR
7511 && DECL_P (TREE_OPERAND (arg1, 0))
7512 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7513 && ! lookup_attribute ("alias",
7514 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7515 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7516 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7517 ? code == EQ_EXPR : code != EQ_EXPR,
7518 type);
7520 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7522 tree targ0 = strip_float_extensions (arg0);
7523 tree targ1 = strip_float_extensions (arg1);
7524 tree newtype = TREE_TYPE (targ0);
7526 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7527 newtype = TREE_TYPE (targ1);
7529 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7530 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7531 return fold (build2 (code, type, fold_convert (newtype, targ0),
7532 fold_convert (newtype, targ1)));
7534 /* (-a) CMP (-b) -> b CMP a */
7535 if (TREE_CODE (arg0) == NEGATE_EXPR
7536 && TREE_CODE (arg1) == NEGATE_EXPR)
7537 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7538 TREE_OPERAND (arg0, 0)));
7540 if (TREE_CODE (arg1) == REAL_CST)
7542 REAL_VALUE_TYPE cst;
7543 cst = TREE_REAL_CST (arg1);
7545 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7546 if (TREE_CODE (arg0) == NEGATE_EXPR)
7547 return
7548 fold (build2 (swap_tree_comparison (code), type,
7549 TREE_OPERAND (arg0, 0),
7550 build_real (TREE_TYPE (arg1),
7551 REAL_VALUE_NEGATE (cst))));
7553 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7554 /* a CMP (-0) -> a CMP 0 */
7555 if (REAL_VALUE_MINUS_ZERO (cst))
7556 return fold (build2 (code, type, arg0,
7557 build_real (TREE_TYPE (arg1), dconst0)));
7559 /* x != NaN is always true, other ops are always false. */
7560 if (REAL_VALUE_ISNAN (cst)
7561 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7563 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7564 return omit_one_operand (type, tem, arg0);
7567 /* Fold comparisons against infinity. */
7568 if (REAL_VALUE_ISINF (cst))
7570 tem = fold_inf_compare (code, type, arg0, arg1);
7571 if (tem != NULL_TREE)
7572 return tem;
7576 /* If this is a comparison of a real constant with a PLUS_EXPR
7577 or a MINUS_EXPR of a real constant, we can convert it into a
7578 comparison with a revised real constant as long as no overflow
7579 occurs when unsafe_math_optimizations are enabled. */
7580 if (flag_unsafe_math_optimizations
7581 && TREE_CODE (arg1) == REAL_CST
7582 && (TREE_CODE (arg0) == PLUS_EXPR
7583 || TREE_CODE (arg0) == MINUS_EXPR)
7584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7585 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7586 ? MINUS_EXPR : PLUS_EXPR,
7587 arg1, TREE_OPERAND (arg0, 1), 0))
7588 && ! TREE_CONSTANT_OVERFLOW (tem))
7589 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7591 /* Likewise, we can simplify a comparison of a real constant with
7592 a MINUS_EXPR whose first operand is also a real constant, i.e.
7593 (c1 - x) < c2 becomes x > c1-c2. */
7594 if (flag_unsafe_math_optimizations
7595 && TREE_CODE (arg1) == REAL_CST
7596 && TREE_CODE (arg0) == MINUS_EXPR
7597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7598 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7599 arg1, 0))
7600 && ! TREE_CONSTANT_OVERFLOW (tem))
7601 return fold (build2 (swap_tree_comparison (code), type,
7602 TREE_OPERAND (arg0, 1), tem));
7604 /* Fold comparisons against built-in math functions. */
7605 if (TREE_CODE (arg1) == REAL_CST
7606 && flag_unsafe_math_optimizations
7607 && ! flag_errno_math)
7609 enum built_in_function fcode = builtin_mathfn_code (arg0);
7611 if (fcode != END_BUILTINS)
7613 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7614 if (tem != NULL_TREE)
7615 return tem;
7620 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7621 if (TREE_CONSTANT (arg1)
7622 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7623 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7624 /* This optimization is invalid for ordered comparisons
7625 if CONST+INCR overflows or if foo+incr might overflow.
7626 This optimization is invalid for floating point due to rounding.
7627 For pointer types we assume overflow doesn't happen. */
7628 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7629 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7630 && (code == EQ_EXPR || code == NE_EXPR))))
7632 tree varop, newconst;
7634 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7636 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7637 arg1, TREE_OPERAND (arg0, 1)));
7638 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7639 TREE_OPERAND (arg0, 0),
7640 TREE_OPERAND (arg0, 1));
7642 else
7644 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7645 arg1, TREE_OPERAND (arg0, 1)));
7646 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7647 TREE_OPERAND (arg0, 0),
7648 TREE_OPERAND (arg0, 1));
7652 /* If VAROP is a reference to a bitfield, we must mask
7653 the constant by the width of the field. */
7654 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7655 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7657 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7658 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7659 tree folded_compare, shift;
7661 /* First check whether the comparison would always
7662 come out the same. If we don't do that, the
7663 masking would change the meaning. */
7664 folded_compare = fold (build2 (code, type,
7665 TREE_OPERAND (varop, 0),
7666 arg1));
7667 if (integer_zerop (folded_compare)
7668 || integer_onep (folded_compare))
7669 return omit_one_operand (type, folded_compare, varop);
7671 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size, 0);
7673 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7674 newconst, shift));
7675 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7676 newconst, shift));
7679 return fold (build2 (code, type, varop, newconst));
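/* For example, i++ == 5 is rewritten as ++i == 6: folding the
   increment into the constant lets the pre-increment form be
   used, which needs no temporary for the old value of i.  */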
7682 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7683 This transformation affects the cases which are handled in later
7684 optimizations involving comparisons with non-negative constants. */
7685 if (TREE_CODE (arg1) == INTEGER_CST
7686 && TREE_CODE (arg0) != INTEGER_CST
7687 && tree_int_cst_sgn (arg1) > 0)
7689 switch (code)
7691 case GE_EXPR:
7692 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7693 return fold (build2 (GT_EXPR, type, arg0, arg1));
7695 case LT_EXPR:
7696 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7697 return fold (build2 (LE_EXPR, type, arg0, arg1));
7699 default:
7700 break;
7704 /* Comparisons with the highest or lowest possible integer of
7705 the specified size will have known values.
7707 This is quite similar to fold_relational_hi_lo; however, my
7708 attempts to share the code have been nothing but trouble.
7709 I give up for now. */
7711 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7713 if (TREE_CODE (arg1) == INTEGER_CST
7714 && ! TREE_CONSTANT_OVERFLOW (arg1)
7715 && width <= HOST_BITS_PER_WIDE_INT
7716 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7717 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7719 unsigned HOST_WIDE_INT signed_max;
7720 unsigned HOST_WIDE_INT max, min;
7722 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7724 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7726 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7727 min = 0;
7729 else
7731 max = signed_max;
7732 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7735 if (TREE_INT_CST_HIGH (arg1) == 0
7736 && TREE_INT_CST_LOW (arg1) == max)
7737 switch (code)
7739 case GT_EXPR:
7740 return omit_one_operand (type, integer_zero_node, arg0);
7742 case GE_EXPR:
7743 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7745 case LE_EXPR:
7746 return omit_one_operand (type, integer_one_node, arg0);
7748 case LT_EXPR:
7749 return fold (build2 (NE_EXPR, type, arg0, arg1));
7751 /* The GE_EXPR and LT_EXPR cases above are not normally
7752 reached because of previous transformations. */
7754 default:
7755 break;
7757 else if (TREE_INT_CST_HIGH (arg1) == 0
7758 && TREE_INT_CST_LOW (arg1) == max - 1)
7759 switch (code)
7761 case GT_EXPR:
7762 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7763 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7764 case LE_EXPR:
7765 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7766 return fold (build2 (NE_EXPR, type, arg0, arg1));
7767 default:
7768 break;
7770 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7771 && TREE_INT_CST_LOW (arg1) == min)
7772 switch (code)
7774 case LT_EXPR:
7775 return omit_one_operand (type, integer_zero_node, arg0);
7777 case LE_EXPR:
7778 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7780 case GE_EXPR:
7781 return omit_one_operand (type, integer_one_node, arg0);
7783 case GT_EXPR:
7784 return fold (build2 (NE_EXPR, type, arg0, arg1));
7786 default:
7787 break;
7789 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7790 && TREE_INT_CST_LOW (arg1) == min + 1)
7791 switch (code)
7793 case GE_EXPR:
7794 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7795 return fold (build2 (NE_EXPR, type, arg0, arg1));
7796 case LT_EXPR:
7797 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7798 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7799 default:
7800 break;
7803 else if (!in_gimple_form
7804 && TREE_INT_CST_HIGH (arg1) == 0
7805 && TREE_INT_CST_LOW (arg1) == signed_max
7806 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7807 /* signed_type does not work on pointer types. */
7808 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7810 /* The following case also applies to X < signed_max+1
7811 and X >= signed_max+1 because of previous transformations. */
7812 if (code == LE_EXPR || code == GT_EXPR)
7814 tree st0, st1;
7815 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7816 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7817 return fold
7818 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7819 type, fold_convert (st0, arg0),
7820 fold_convert (st1, integer_zero_node)));
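/* For example, for unsigned int x, x <= INT_MAX folds to
   (int) x >= 0 and x > INT_MAX to (int) x < 0, turning the
   comparison into a simple sign test.  */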
7826 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7827 a MINUS_EXPR of a constant, we can convert it into a comparison with
7828 a revised constant as long as no overflow occurs. */
7829 if ((code == EQ_EXPR || code == NE_EXPR)
7830 && TREE_CODE (arg1) == INTEGER_CST
7831 && (TREE_CODE (arg0) == PLUS_EXPR
7832 || TREE_CODE (arg0) == MINUS_EXPR)
7833 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7834 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7835 ? MINUS_EXPR : PLUS_EXPR,
7836 arg1, TREE_OPERAND (arg0, 1), 0))
7837 && ! TREE_CONSTANT_OVERFLOW (tem))
7838 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7840 /* Similarly for a NEGATE_EXPR. */
7841 else if ((code == EQ_EXPR || code == NE_EXPR)
7842 && TREE_CODE (arg0) == NEGATE_EXPR
7843 && TREE_CODE (arg1) == INTEGER_CST
7844 && 0 != (tem = negate_expr (arg1))
7845 && TREE_CODE (tem) == INTEGER_CST
7846 && ! TREE_CONSTANT_OVERFLOW (tem))
7847 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7849 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7850 for !=. Don't do this for ordered comparisons due to overflow. */
7851 else if ((code == NE_EXPR || code == EQ_EXPR)
7852 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7853 return fold (build2 (code, type,
7854 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7856 /* If we are widening one operand of an integer comparison,
7857 see if the other operand is similarly being widened. Perhaps we
7858 can do the comparison in the narrower type. */
7859 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7860 && TREE_CODE (arg0) == NOP_EXPR
7861 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7862 && (code == EQ_EXPR || code == NE_EXPR
7863 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7864 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7865 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7866 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7867 || (TREE_CODE (t1) == INTEGER_CST
7868 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7869 return fold (build2 (code, type, tem,
7870 fold_convert (TREE_TYPE (tem), t1)));
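/* For example, if c1 and c2 are chars, (int) c1 == (int) c2 is
   folded back to a comparison done directly in the narrower
   char type.  */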
7872 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7873 constant, we can simplify it. */
7874 else if (TREE_CODE (arg1) == INTEGER_CST
7875 && (TREE_CODE (arg0) == MIN_EXPR
7876 || TREE_CODE (arg0) == MAX_EXPR)
7877 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7878 return optimize_minmax_comparison (t);
7880 /* If we are comparing an ABS_EXPR with a constant, we can
7881 convert all the cases into explicit comparisons, but they may
7882 well not be faster than doing the ABS and one comparison.
7883 But ABS (X) <= C is a range comparison, which becomes a subtraction
7884 and a comparison, and is probably faster. */
7885 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7886 && TREE_CODE (arg0) == ABS_EXPR
7887 && ! TREE_SIDE_EFFECTS (arg0)
7888 && (0 != (tem = negate_expr (arg1)))
7889 && TREE_CODE (tem) == INTEGER_CST
7890 && ! TREE_CONSTANT_OVERFLOW (tem))
7891 return fold (build2 (TRUTH_ANDIF_EXPR, type,
7892 build2 (GE_EXPR, type,
7893 TREE_OPERAND (arg0, 0), tem),
7894 build2 (LE_EXPR, type,
7895 TREE_OPERAND (arg0, 0), arg1)));
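/* For example, abs (x) <= 5 folds to x >= -5 && x <= 5, a range
   test that the range machinery can turn into a single unsigned
   comparison.  */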
7897 /* If this is an EQ or NE comparison with zero and ARG0 is
7898 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7899 two operations, but the latter can be done in one less insn
7900 on machines that have only two-operand insns or on which a
7901 constant cannot be the first operand. */
7902 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7903 && TREE_CODE (arg0) == BIT_AND_EXPR)
7905 tree arg00 = TREE_OPERAND (arg0, 0);
7906 tree arg01 = TREE_OPERAND (arg0, 1);
7907 if (TREE_CODE (arg00) == LSHIFT_EXPR
7908 && integer_onep (TREE_OPERAND (arg00, 0)))
7909 return
7910 fold (build2 (code, type,
7911 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7912 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
7913 arg01, TREE_OPERAND (arg00, 1)),
7914 fold_convert (TREE_TYPE (arg0),
7915 integer_one_node)),
7916 arg1));
7917 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7918 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7919 return
7920 fold (build2 (code, type,
7921 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7922 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
7923 arg00, TREE_OPERAND (arg01, 1)),
7924 fold_convert (TREE_TYPE (arg0),
7925 integer_one_node)),
7926 arg1));
7929 /* If this is an NE or EQ comparison of zero against the result of a
7930 signed MOD operation whose second operand is a power of 2, make
7931 the MOD operation unsigned since it is simpler and equivalent. */
7932 if ((code == NE_EXPR || code == EQ_EXPR)
7933 && integer_zerop (arg1)
7934 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7935 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7936 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7937 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7938 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7939 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7941 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7942 tree newmod = build2 (TREE_CODE (arg0), newtype,
7943 fold_convert (newtype,
7944 TREE_OPERAND (arg0, 0)),
7945 fold_convert (newtype,
7946 TREE_OPERAND (arg0, 1)));
7948 return build2 (code, type, newmod, fold_convert (newtype, arg1));
7951 /* If this is an NE comparison of zero with an AND of one, remove the
7952 comparison since the AND will give the correct value. */
7953 if (code == NE_EXPR && integer_zerop (arg1)
7954 && TREE_CODE (arg0) == BIT_AND_EXPR
7955 && integer_onep (TREE_OPERAND (arg0, 1)))
7956 return fold_convert (type, arg0);
7958 /* If we have (A & C) == C where C is a power of 2, convert this into
7959 (A & C) != 0. Similarly for NE_EXPR. */
7960 if ((code == EQ_EXPR || code == NE_EXPR)
7961 && TREE_CODE (arg0) == BIT_AND_EXPR
7962 && integer_pow2p (TREE_OPERAND (arg0, 1))
7963 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7964 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7965 arg0, integer_zero_node));
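/* For example, (a & 8) == 8 folds to (a & 8) != 0, which the
   single-bit test handling just below can simplify further.  */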
7967 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7968 2, then fold the expression into shifts and logical operations. */
7969 tem = fold_single_bit_test (code, arg0, arg1, type);
7970 if (tem)
7971 return tem;
7973 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7974 Similarly for NE_EXPR. */
7975 if ((code == EQ_EXPR || code == NE_EXPR)
7976 && TREE_CODE (arg0) == BIT_AND_EXPR
7977 && TREE_CODE (arg1) == INTEGER_CST
7978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7980 tree dandnotc
7981 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7982 arg1, build1 (BIT_NOT_EXPR,
7983 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7984 TREE_OPERAND (arg0, 1))));
7985 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7986 if (integer_nonzerop (dandnotc))
7987 return omit_one_operand (type, rslt, arg0);
7990 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7991 Similarly for NE_EXPR. */
7992 if ((code == EQ_EXPR || code == NE_EXPR)
7993 && TREE_CODE (arg0) == BIT_IOR_EXPR
7994 && TREE_CODE (arg1) == INTEGER_CST
7995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7997 tree candnotd
7998 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7999 TREE_OPERAND (arg0, 1),
8000 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8001 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8002 if (integer_nonzerop (candnotd))
8003 return omit_one_operand (type, rslt, arg0);
8006 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8007 and similarly for >= into !=. */
8008 if ((code == LT_EXPR || code == GE_EXPR)
8009 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8010 && TREE_CODE (arg1) == LSHIFT_EXPR
8011 && integer_onep (TREE_OPERAND (arg1, 0)))
8012 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8013 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8014 TREE_OPERAND (arg1, 1)),
8015 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8017 else if ((code == LT_EXPR || code == GE_EXPR)
8018 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8019 && (TREE_CODE (arg1) == NOP_EXPR
8020 || TREE_CODE (arg1) == CONVERT_EXPR)
8021 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8022 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8023 return
8024 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8025 fold_convert (TREE_TYPE (arg0),
8026 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8027 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8028 1))),
8029 fold_convert (TREE_TYPE (arg0), integer_zero_node));
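/* For example, for unsigned x, x < (1 << y) folds to
   (x >> y) == 0 and x >= (1 << y) to (x >> y) != 0, avoiding the
   materialization of the shifted constant.  */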
8031 /* Simplify comparison of something with itself. (For IEEE
8032 floating-point, we can only do some of these simplifications.) */
8033 if (operand_equal_p (arg0, arg1, 0))
8035 switch (code)
8037 case EQ_EXPR:
8038 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8039 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8040 return constant_boolean_node (1, type);
8041 break;
8043 case GE_EXPR:
8044 case LE_EXPR:
8045 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8046 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8047 return constant_boolean_node (1, type);
8048 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8050 case NE_EXPR:
8051 /* For NE, we can only do this simplification if integer
8052 or we don't honor IEEE floating point NaNs. */
8053 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8054 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8055 break;
8056 /* ... fall through ... */
8057 case GT_EXPR:
8058 case LT_EXPR:
8059 return constant_boolean_node (0, type);
8060 default:
8061 abort ();
8065 /* If we are comparing an expression that just has comparisons
8066 of two integer values, arithmetic expressions of those comparisons,
8067 and constants, we can simplify it. There are only three cases
8068 to check: the two values can either be equal, the first can be
8069 greater, or the second can be greater. Fold the expression for
8070 those three values. Since each value must be 0 or 1, we have
8071 eight possibilities, each of which corresponds to the constant 0
8072 or 1 or one of the six possible comparisons.
8074 This handles common cases like (a > b) == 0 but also handles
8075 expressions like ((x > y) - (y > x)) > 0, which supposedly
8076 occur in macroized code. */
8078 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8080 tree cval1 = 0, cval2 = 0;
8081 int save_p = 0;
8083 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8084 /* Don't handle degenerate cases here; they should already
8085 have been handled anyway. */
8086 && cval1 != 0 && cval2 != 0
8087 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8088 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8089 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8090 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8091 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8092 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8093 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8095 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8096 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8098 /* We can't just pass T to eval_subst in case cval1 or cval2
8099 was the same as ARG1. */
8101 tree high_result
8102 = fold (build2 (code, type,
8103 eval_subst (arg0, cval1, maxval,
8104 cval2, minval),
8105 arg1));
8106 tree equal_result
8107 = fold (build2 (code, type,
8108 eval_subst (arg0, cval1, maxval,
8109 cval2, maxval),
8110 arg1));
8111 tree low_result
8112 = fold (build2 (code, type,
8113 eval_subst (arg0, cval1, minval,
8114 cval2, maxval),
8115 arg1));
8117 /* All three of these results should be 0 or 1. Confirm they
8118 are. Then use those values to select the proper code
8119 to use. */
8121 if ((integer_zerop (high_result)
8122 || integer_onep (high_result))
8123 && (integer_zerop (equal_result)
8124 || integer_onep (equal_result))
8125 && (integer_zerop (low_result)
8126 || integer_onep (low_result)))
8128 /* Make a 3-bit mask with the high-order bit being the
8129 value for `>', the next for '=', and the low for '<'. */
8130 switch ((integer_onep (high_result) * 4)
8131 + (integer_onep (equal_result) * 2)
8132 + integer_onep (low_result))
8134 case 0:
8135 /* Always false. */
8136 return omit_one_operand (type, integer_zero_node, arg0);
8137 case 1:
8138 code = LT_EXPR;
8139 break;
8140 case 2:
8141 code = EQ_EXPR;
8142 break;
8143 case 3:
8144 code = LE_EXPR;
8145 break;
8146 case 4:
8147 code = GT_EXPR;
8148 break;
8149 case 5:
8150 code = NE_EXPR;
8151 break;
8152 case 6:
8153 code = GE_EXPR;
8154 break;
8155 case 7:
8156 /* Always true. */
8157 return omit_one_operand (type, integer_one_node, arg0);
8160 tem = build2 (code, type, cval1, cval2);
8161 if (save_p)
8162 return save_expr (tem);
8163 else
8164 return fold (tem);
8169 /* If this is a comparison of a field, we may be able to simplify it. */
8170 if (((TREE_CODE (arg0) == COMPONENT_REF
8171 && lang_hooks.can_use_bit_fields_p ())
8172 || TREE_CODE (arg0) == BIT_FIELD_REF)
8173 && (code == EQ_EXPR || code == NE_EXPR)
8174 /* Handle the constant case even without -O
8175 to make sure the warnings are given. */
8176 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8178 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8179 if (t1)
8180 return t1;
8183 /* If this is a comparison of complex values and either or both sides
8184 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8185 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8186 This may prevent needless evaluations. */
8187 if ((code == EQ_EXPR || code == NE_EXPR)
8188 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8189 && (TREE_CODE (arg0) == COMPLEX_EXPR
8190 || TREE_CODE (arg1) == COMPLEX_EXPR
8191 || TREE_CODE (arg0) == COMPLEX_CST
8192 || TREE_CODE (arg1) == COMPLEX_CST))
8194 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8195 tree real0, imag0, real1, imag1;
8197 arg0 = save_expr (arg0);
8198 arg1 = save_expr (arg1);
8199 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8200 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8201 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8202 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8204 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8205 : TRUTH_ORIF_EXPR),
8206 type,
8207 fold (build2 (code, type, real0, real1)),
8208 fold (build2 (code, type, imag0, imag1))));
8211 /* Optimize comparisons of strlen vs zero to a compare of the
8212 first character of the string vs zero. To wit,
8213 strlen(ptr) == 0 => *ptr == 0
8214 strlen(ptr) != 0 => *ptr != 0
8215 Other cases should reduce to one of these two (or a constant)
8216 due to the return value of strlen being unsigned. */
8217 if ((code == EQ_EXPR || code == NE_EXPR)
8218 && integer_zerop (arg1)
8219 && TREE_CODE (arg0) == CALL_EXPR)
8221 tree fndecl = get_callee_fndecl (arg0);
8222 tree arglist;
8224 if (fndecl
8225 && DECL_BUILT_IN (fndecl)
8226 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8227 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8228 && (arglist = TREE_OPERAND (arg0, 1))
8229 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8230 && ! TREE_CHAIN (arglist))
8231 return fold (build2 (code, type,
8232 build1 (INDIRECT_REF, char_type_node,
8233 TREE_VALUE (arglist)),
8234 integer_zero_node));
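/* For example, strlen (s) == 0 folds to *s == 0: emptiness is
   decided by inspecting only the first character.  */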
8237 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8238 into a single range test. */
8239 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8240 && TREE_CODE (arg1) == INTEGER_CST
8241 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8242 && !integer_zerop (TREE_OPERAND (arg0, 1))
8243 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8244 && !TREE_OVERFLOW (arg1))
8246 t1 = fold_div_compare (code, type, arg0, arg1);
8247 if (t1 != NULL_TREE)
8248 return t1;
8251 /* Both ARG0 and ARG1 are known to be constants at this point. */
8252 t1 = fold_relational_const (code, type, arg0, arg1);
8253 return (t1 == NULL_TREE ? t : t1);
8255 case UNORDERED_EXPR:
8256 case ORDERED_EXPR:
8257 case UNLT_EXPR:
8258 case UNLE_EXPR:
8259 case UNGT_EXPR:
8260 case UNGE_EXPR:
8261 case UNEQ_EXPR:
8262 case LTGT_EXPR:
8263 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8265 t1 = fold_relational_const (code, type, arg0, arg1);
8266 if (t1 != NULL_TREE)
8267 return t1;
8270 /* If the first operand is NaN, the result is constant. */
8271 if (TREE_CODE (arg0) == REAL_CST
8272 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8273 && (code != LTGT_EXPR || ! flag_trapping_math))
8275 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8276 ? integer_zero_node
8277 : integer_one_node;
8278 return omit_one_operand (type, t1, arg1);
8281 /* If the second operand is NaN, the result is constant. */
8282 if (TREE_CODE (arg1) == REAL_CST
8283 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8284 && (code != LTGT_EXPR || ! flag_trapping_math))
8286 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8287 ? integer_zero_node
8288 : integer_one_node;
8289 return omit_one_operand (type, t1, arg0);
8292 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8294 tree targ0 = strip_float_extensions (arg0);
8295 tree targ1 = strip_float_extensions (arg1);
8296 tree newtype = TREE_TYPE (targ0);
8298 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8299 newtype = TREE_TYPE (targ1);
8301 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8302 return fold (build2 (code, type, fold_convert (newtype, targ0),
8303 fold_convert (newtype, targ1)));
8306 return t;
8308 case COND_EXPR:
8309 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8310 so all simple results must be passed through pedantic_non_lvalue. */
8311 if (TREE_CODE (arg0) == INTEGER_CST)
8313 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8314 /* Only optimize constant conditions when the selected branch
8315 has the same type as the COND_EXPR. This avoids optimizing
8316 away "c ? x : throw", where the throw has a void type. */
8317 if (! VOID_TYPE_P (TREE_TYPE (tem))
8318 || VOID_TYPE_P (type))
8319 return pedantic_non_lvalue (tem);
8320 return t;
8322 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8323 return pedantic_omit_one_operand (type, arg1, arg0);
8325 /* If we have A op B ? A : C, we may be able to convert this to a
8326 simpler expression, depending on the operation and the values
8327 of B and C. Signed zeros prevent all of these transformations,
8328 for reasons given above each one. */
8330 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8331 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8332 arg1, TREE_OPERAND (arg0, 1))
8333 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8335 tree arg2 = TREE_OPERAND (t, 2);
8336 enum tree_code comp_code = TREE_CODE (arg0);
8338 STRIP_NOPS (arg2);
8340 /* If we have A op 0 ? A : -A, consider applying the following
8341 transformations:
8343 A == 0? A : -A same as -A
8344 A != 0? A : -A same as A
8345 A >= 0? A : -A same as abs (A)
8346 A > 0? A : -A same as abs (A)
8347 A <= 0? A : -A same as -abs (A)
8348 A < 0? A : -A same as -abs (A)
8350 None of these transformations work for modes with signed
8351 zeros. If A is +/-0, the first two transformations will
8352 change the sign of the result (from +0 to -0, or vice
8353 versa). The last four will fix the sign of the result,
8354 even though the original expressions could be positive or
8355 negative, depending on the sign of A.
8357 Note that all these transformations are correct if A is
8358 NaN, since the two alternatives (A and -A) are also NaNs. */
8359 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8360 ? real_zerop (TREE_OPERAND (arg0, 1))
8361 : integer_zerop (TREE_OPERAND (arg0, 1)))
8362 && TREE_CODE (arg2) == NEGATE_EXPR
8363 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8364 switch (comp_code)
8366 case EQ_EXPR:
8367 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8368 tem = fold_convert (type, negate_expr (tem));
8369 return pedantic_non_lvalue (tem);
8370 case NE_EXPR:
8371 return pedantic_non_lvalue (fold_convert (type, arg1));
8372 case GE_EXPR:
8373 case GT_EXPR:
8374 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8375 arg1 = fold_convert (lang_hooks.types.signed_type
8376 (TREE_TYPE (arg1)), arg1);
8377 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8378 return pedantic_non_lvalue (fold_convert (type, arg1));
8379 case LE_EXPR:
8380 case LT_EXPR:
8381 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8382 arg1 = fold_convert (lang_hooks.types.signed_type
8383 (TREE_TYPE (arg1)), arg1);
8384 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8385 arg1 = negate_expr (fold_convert (type, arg1));
8386 return pedantic_non_lvalue (arg1);
8387 default:
8388 abort ();
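/* For example, x > 0 ? x : -x folds to abs (x) and
   x < 0 ? x : -x to -abs (x); an unsigned operand is first
   converted to the corresponding signed type so that the
   ABS_EXPR is well-formed.  */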
8391 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8392 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8393 both transformations are correct when A is NaN: A != 0
8394 is then true, and A == 0 is false. */
8396 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8398 if (comp_code == NE_EXPR)
8399 return pedantic_non_lvalue (fold_convert (type, arg1));
8400 else if (comp_code == EQ_EXPR)
8401 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8404 /* Try some transformations of A op B ? A : B.
8406 A == B? A : B same as B
8407 A != B? A : B same as A
8408 A >= B? A : B same as max (A, B)
8409 A > B? A : B same as max (B, A)
8410 A <= B? A : B same as min (A, B)
8411 A < B? A : B same as min (B, A)
8413 As above, these transformations don't work in the presence
8414 of signed zeros. For example, if A and B are zeros of
8415 opposite sign, the first two transformations will change
8416 the sign of the result. In the last four, the original
8417 expressions give different results for (A=+0, B=-0) and
8418 (A=-0, B=+0), but the transformed expressions do not.
8420 The first two transformations are correct if either A or B
8421 is a NaN. In the first transformation, the condition will
8422 be false, and B will indeed be chosen. In the case of the
8423 second transformation, the condition A != B will be true,
8424 and A will be chosen.
8426 The conversions to max() and min() are not correct if B is
8427 a number and A is not. The conditions in the original
8428 expressions will be false, so all four give B. The min()
8429 and max() versions would give a NaN instead. */
8430 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8431 arg2, TREE_OPERAND (arg0, 0)))
8433 tree comp_op0 = TREE_OPERAND (arg0, 0);
8434 tree comp_op1 = TREE_OPERAND (arg0, 1);
8435 tree comp_type = TREE_TYPE (comp_op0);
8437 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8438 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8440 comp_type = type;
8441 comp_op0 = arg1;
8442 comp_op1 = arg2;
8445 switch (comp_code)
8447 case EQ_EXPR:
8448 return pedantic_non_lvalue (fold_convert (type, arg2));
8449 case NE_EXPR:
8450 return pedantic_non_lvalue (fold_convert (type, arg1));
8451 case LE_EXPR:
8452 case LT_EXPR:
8453 /* In C++ a ?: expression can be an lvalue, so place first
8454 the operand that will be used if they are equal,
8455 so that we can convert this back to the
8456 corresponding COND_EXPR. */
8457 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8458 return pedantic_non_lvalue (fold_convert
8459 (type, fold (build2 (MIN_EXPR, comp_type,
8460 (comp_code == LE_EXPR
8461 ? comp_op0 : comp_op1),
8462 (comp_code == LE_EXPR
8463 ? comp_op1 : comp_op0)))));
8464 break;
8465 case GE_EXPR:
8466 case GT_EXPR:
8467 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8468 return pedantic_non_lvalue (fold_convert
8469 (type, fold (build2 (MAX_EXPR, comp_type,
8470 (comp_code == GE_EXPR
8471 ? comp_op0 : comp_op1),
8472 (comp_code == GE_EXPR
8473 ? comp_op1 : comp_op0)))));
8474 break;
8475 default:
8476 abort ();
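/* For example, a <= b ? a : b folds to MIN_EXPR (a, b) and
   a >= b ? a : b to MAX_EXPR (a, b), but only when NaNs need not
   be honored, for the reasons given above.  */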
8480 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8481 we might still be able to simplify this. For example,
8482 if C1 is one less or one more than C2, this might have started
8483 out as a MIN or MAX and been transformed by this function.
8484 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8486 if (INTEGRAL_TYPE_P (type)
8487 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8488 && TREE_CODE (arg2) == INTEGER_CST)
8489 switch (comp_code)
8491 case EQ_EXPR:
8492 /* We can replace A with C1 in this case. */
8493 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8494 return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
8495 TREE_OPERAND (t, 2)));
8497 case LT_EXPR:
8498 /* If C1 is C2 + 1, this is min(A, C2). */
8499 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8500 OEP_ONLY_CONST)
8501 && operand_equal_p (TREE_OPERAND (arg0, 1),
8502 const_binop (PLUS_EXPR, arg2,
8503 integer_one_node, 0),
8504 OEP_ONLY_CONST))
8505 return pedantic_non_lvalue
8506 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8507 break;
8509 case LE_EXPR:
8510 /* If C1 is C2 - 1, this is min(A, C2). */
8511 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8512 OEP_ONLY_CONST)
8513 && operand_equal_p (TREE_OPERAND (arg0, 1),
8514 const_binop (MINUS_EXPR, arg2,
8515 integer_one_node, 0),
8516 OEP_ONLY_CONST))
8517 return pedantic_non_lvalue
8518 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8519 break;
8521 case GT_EXPR:
8522 /* If C1 is C2 - 1, this is max(A, C2). */
8523 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8524 OEP_ONLY_CONST)
8525 && operand_equal_p (TREE_OPERAND (arg0, 1),
8526 const_binop (MINUS_EXPR, arg2,
8527 integer_one_node, 0),
8528 OEP_ONLY_CONST))
8529 return pedantic_non_lvalue
8530 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8531 break;
8533 case GE_EXPR:
8534 /* If C1 is C2 + 1, this is max(A, C2). */
8535 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8536 OEP_ONLY_CONST)
8537 && operand_equal_p (TREE_OPERAND (arg0, 1),
8538 const_binop (PLUS_EXPR, arg2,
8539 integer_one_node, 0),
8540 OEP_ONLY_CONST))
8541 return pedantic_non_lvalue
8542 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8543 break;
8544 case NE_EXPR:
8545 break;
8546 default:
8547 abort ();
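/* For example, x < 6 ? x : 5 has C1 == C2 + 1, so it is
   recognized as min (x, 5) even though the direct min/max
   detection above missed it because the constants differ.  */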
8551 /* If the second operand is simpler than the third, swap them
8552 since that produces better jump optimization results. */
8553 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8554 TREE_OPERAND (t, 2), false))
8556 /* See if this can be inverted. If it can't, possibly because
8557 it was a floating-point inequality comparison, don't do
8558 anything. */
8559 tem = invert_truthvalue (arg0);
8561 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8562 return fold (build3 (code, type, tem,
8563 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8566 /* Convert A ? 1 : 0 to simply A. */
8567 if (integer_onep (TREE_OPERAND (t, 1))
8568 && integer_zerop (TREE_OPERAND (t, 2))
8569 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8570 call to fold will try to move the conversion inside
8571 a COND, which will recurse. In that case, the COND_EXPR
8572 is probably the best choice, so leave it alone. */
8573 && type == TREE_TYPE (arg0))
8574 return pedantic_non_lvalue (arg0);
8576 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8577 over COND_EXPR in cases such as floating point comparisons. */
8578 if (integer_zerop (TREE_OPERAND (t, 1))
8579 && integer_onep (TREE_OPERAND (t, 2))
8580 && truth_value_p (TREE_CODE (arg0)))
8581 return pedantic_non_lvalue (fold_convert (type,
8582 invert_truthvalue (arg0)));
8584 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8585 operation is simply A & 2. */
8587 if (integer_zerop (TREE_OPERAND (t, 2))
8588 && TREE_CODE (arg0) == NE_EXPR
8589 && integer_zerop (TREE_OPERAND (arg0, 1))
8590 && integer_pow2p (arg1)
8591 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8592 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8593 arg1, OEP_ONLY_CONST))
8594 return pedantic_non_lvalue (fold_convert (type,
8595 TREE_OPERAND (arg0, 0)));
8597 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8598 if (integer_zerop (TREE_OPERAND (t, 2))
8599 && truth_value_p (TREE_CODE (arg0))
8600 && truth_value_p (TREE_CODE (arg1)))
8601 return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
8602 arg0, arg1)));
8604 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8605 if (integer_onep (TREE_OPERAND (t, 2))
8606 && truth_value_p (TREE_CODE (arg0))
8607 && truth_value_p (TREE_CODE (arg1)))
8609 /* Only perform the transformation if ARG0 is easily inverted. */
8610 tem = invert_truthvalue (arg0);
8611 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8612 return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
8613 tem, arg1)));
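/* For example, for truth values a and b, a ? b : 0 folds to
   a && b, and a ? b : 1 folds to !a || b when a inverts cleanly.  */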
8616 return t;
8618 case COMPOUND_EXPR:
8619 /* When pedantic, a compound expression can be neither an lvalue
8620 nor an integer constant expression. */
8621 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8622 return t;
8623 /* Don't let (0, 0) be a null pointer constant. */
8624 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8625 : fold_convert (type, arg1);
8626 return pedantic_non_lvalue (tem);
8628 case COMPLEX_EXPR:
8629 if (wins)
8630 return build_complex (type, arg0, arg1);
8631 return t;
8633 case REALPART_EXPR:
8634 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8635 return t;
8636 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8637 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8638 TREE_OPERAND (arg0, 1));
8639 else if (TREE_CODE (arg0) == COMPLEX_CST)
8640 return TREE_REALPART (arg0);
8641 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8642 return fold (build2 (TREE_CODE (arg0), type,
8643 fold (build1 (REALPART_EXPR, type,
8644 TREE_OPERAND (arg0, 0))),
8645 fold (build1 (REALPART_EXPR, type,
8646 TREE_OPERAND (arg0, 1)))));
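/* For example, REALPART_EXPR <z + w> folds to
   REALPART_EXPR <z> + REALPART_EXPR <w>, since addition acts
   componentwise on complex values; IMAGPART_EXPR below is
   handled the same way.  */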
8647 return t;
8649 case IMAGPART_EXPR:
8650 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8651 return fold_convert (type, integer_zero_node);
8652 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8653 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8654 TREE_OPERAND (arg0, 0));
8655 else if (TREE_CODE (arg0) == COMPLEX_CST)
8656 return TREE_IMAGPART (arg0);
8657 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8658 return fold (build2 (TREE_CODE (arg0), type,
8659 fold (build1 (IMAGPART_EXPR, type,
8660 TREE_OPERAND (arg0, 0))),
8661 fold (build1 (IMAGPART_EXPR, type,
8662 TREE_OPERAND (arg0, 1)))));
8663 return t;
8665 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8666 appropriate. */
8667 case CLEANUP_POINT_EXPR:
8668 if (! has_cleanups (arg0))
8669 return TREE_OPERAND (t, 0);
8672 enum tree_code code0 = TREE_CODE (arg0);
8673 int kind0 = TREE_CODE_CLASS (code0);
8674 tree arg00 = TREE_OPERAND (arg0, 0);
8675 tree arg01;
8677 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8678 return fold (build1 (code0, type,
8679 fold (build1 (CLEANUP_POINT_EXPR,
8680 TREE_TYPE (arg00), arg00))));
8682 if (kind0 == '<' || kind0 == '2'
8683 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8684 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8685 || code0 == TRUTH_XOR_EXPR)
8687 arg01 = TREE_OPERAND (arg0, 1);
8689 if (TREE_CONSTANT (arg00)
8690 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8691 && ! has_cleanups (arg00)))
8692 return fold (build2 (code0, type, arg00,
8693 fold (build1 (CLEANUP_POINT_EXPR,
8694 TREE_TYPE (arg01), arg01))));
8696 if (TREE_CONSTANT (arg01))
8697 return fold (build2 (code0, type,
8698 fold (build1 (CLEANUP_POINT_EXPR,
8699 TREE_TYPE (arg00), arg00)),
8700 arg01));
8703 return t;
8706 case CALL_EXPR:
8707 /* Check for a built-in function. */
8708 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8709 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8710 == FUNCTION_DECL)
8711 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8713 tree tmp = fold_builtin (t);
8714 if (tmp)
8715 return tmp;
8717 return t;
8719 default:
8720 return t;
8721 } /* switch (code) */
8724 #ifdef ENABLE_FOLD_CHECKING
8725 #undef fold
8727 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8728 static void fold_check_failed (tree, tree);
8729 void print_fold_checksum (tree);
8731 /* When --enable-checking=fold, compute a digest of EXPR before
8732 and after the actual fold call, to verify that fold did not
8733 accidentally change the original expr. */
8735 tree
8736 fold (tree expr)
8738 tree ret;
8739 struct md5_ctx ctx;
8740 unsigned char checksum_before[16], checksum_after[16];
8741 htab_t ht;
8743 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8744 md5_init_ctx (&ctx);
8745 fold_checksum_tree (expr, &ctx, ht);
8746 md5_finish_ctx (&ctx, checksum_before);
8747 htab_empty (ht);
8749 ret = fold_1 (expr);
8751 md5_init_ctx (&ctx);
8752 fold_checksum_tree (expr, &ctx, ht);
8753 md5_finish_ctx (&ctx, checksum_after);
8754 htab_delete (ht);
8756 if (memcmp (checksum_before, checksum_after, 16))
8757 fold_check_failed (expr, ret);
8759 return ret;
8762 void
8763 print_fold_checksum (tree expr)
8765 struct md5_ctx ctx;
8766 unsigned char checksum[16], cnt;
8767 htab_t ht;
8769 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8770 md5_init_ctx (&ctx);
8771 fold_checksum_tree (expr, &ctx, ht);
8772 md5_finish_ctx (&ctx, checksum);
8773 htab_delete (ht);
8774 for (cnt = 0; cnt < 16; ++cnt)
8775 fprintf (stderr, "%02x", checksum[cnt]);
8776 putc ('\n', stderr);
8779 static void
8780 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8782 internal_error ("fold check: original tree changed by fold");
8785 static void
8786 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8788 void **slot;
8789 enum tree_code code;
8790 char buf[sizeof (struct tree_decl)];
8791 int i, len;
8793 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8794 > sizeof (struct tree_decl)
8795 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8796 abort ();
8797 if (expr == NULL)
8798 return;
8799 slot = htab_find_slot (ht, expr, INSERT);
8800 if (*slot != NULL)
8801 return;
8802 *slot = expr;
8803 code = TREE_CODE (expr);
8804 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8806 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8807 memcpy (buf, expr, tree_size (expr));
8808 expr = (tree) buf;
8809 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8811 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8813 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8814 memcpy (buf, expr, tree_size (expr));
8815 expr = (tree) buf;
8816 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8818 else if (TREE_CODE_CLASS (code) == 't'
8819 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8821 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8822 memcpy (buf, expr, tree_size (expr));
8823 expr = (tree) buf;
8824 TYPE_POINTER_TO (expr) = NULL;
8825 TYPE_REFERENCE_TO (expr) = NULL;
8827 md5_process_bytes (expr, tree_size (expr), ctx);
8828 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8829 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8830 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8831 len = TREE_CODE_LENGTH (code);
8832 switch (TREE_CODE_CLASS (code))
8834 case 'c':
8835 switch (code)
8837 case STRING_CST:
8838 md5_process_bytes (TREE_STRING_POINTER (expr),
8839 TREE_STRING_LENGTH (expr), ctx);
8840 break;
8841 case COMPLEX_CST:
8842 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8843 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8844 break;
8845 case VECTOR_CST:
8846 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8847 break;
8848 default:
8849 break;
8851 break;
8852 case 'x':
8853 switch (code)
8855 case TREE_LIST:
8856 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8857 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8858 break;
8859 case TREE_VEC:
8860 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8861 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8862 break;
8863 default:
8864 break;
8866 break;
8867 case 'e':
8868 switch (code)
8870 case SAVE_EXPR: len = 2; break;
8871 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8872 case RTL_EXPR: len = 0; break;
8873 case WITH_CLEANUP_EXPR: len = 2; break;
8874 default: break;
8876 /* Fall through. */
8877 case 'r':
8878 case '<':
8879 case '1':
8880 case '2':
8881 case 's':
8882 for (i = 0; i < len; ++i)
8883 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8884 break;
8885 case 'd':
8886 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8887 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8888 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8889 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8890 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8891 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8892 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8893 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8894 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8895 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8896 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8897 break;
8898 case 't':
8899 if (TREE_CODE (expr) == ENUMERAL_TYPE)
8900 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8901 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8902 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8903 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8904 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8905 if (INTEGRAL_TYPE_P (expr)
8906 || SCALAR_FLOAT_TYPE_P (expr))
8908 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8909 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8911 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8912 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8913 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8914 break;
8915 default:
8916 break;
8920 #endif
8922 /* Perform constant folding and related simplification of initializer
8923 expression EXPR. This behaves identically to "fold" but ignores
8924 potential run-time traps and exceptions that fold must preserve. */
8926 tree
8927 fold_initializer (tree expr)
8929 int saved_signaling_nans = flag_signaling_nans;
8930 int saved_trapping_math = flag_trapping_math;
8931 int saved_trapv = flag_trapv;
8932 tree result;
8934 flag_signaling_nans = 0;
8935 flag_trapping_math = 0;
8936 flag_trapv = 0;
8938 result = fold (expr);
8940 flag_signaling_nans = saved_signaling_nans;
8941 flag_trapping_math = saved_trapping_math;
8942 flag_trapv = saved_trapv;
8944 return result;
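/* Illustrative sketch, not compiled in: a hypothetical caller of
   fold_initializer.  Because initializers are evaluated at translation
   time, expressions that plain fold would have to preserve for their
   run-time trap or exception behavior may still be simplified here.
   The helper name is hypothetical.  */
#if 0
static tree
example_fold_static_initializer (tree init)
{
  /* With -ftrapping-math, fold (init) must keep a floating-point
     division that could raise an exception at run time;
     fold_initializer may simplify it anyway.  */
  return fold_initializer (init);
}
#endif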
8947 /* Determine whether the first argument is a multiple of the second. Return 0
8948 if it is not, or if we cannot easily determine that it is.
8950 An example of the sort of thing we care about (at this point; this routine
8951 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8952 fold cases do now) is discovering that
8954 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8956 is a multiple of
8958 SAVE_EXPR (J * 8)
8960 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8962 This code also handles discovering that
8964 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8966 is a multiple of 8 so we don't have to worry about dealing with a
8967 possible remainder.
8969 Note that we *look* inside a SAVE_EXPR only to determine how it was
8970 calculated; it is not safe for fold to do much of anything else with the
8971 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8972 at run time. For example, the latter example above *cannot* be implemented
8973 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8974 evaluation time of the original SAVE_EXPR is not necessarily the same at
8975 the time the new expression is evaluated. The only optimization of this
8976 sort that would be valid is changing
8978 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8980 divided by 8 to
8982 SAVE_EXPR (I) * SAVE_EXPR (J)
8984 (where the same SAVE_EXPR (J) is used in the original and the
8985 transformed version). */
8987 static int
8988 multiple_of_p (tree type, tree top, tree bottom)
8990 if (operand_equal_p (top, bottom, 0))
8991 return 1;
8993 if (TREE_CODE (type) != INTEGER_TYPE)
8994 return 0;
8996 switch (TREE_CODE (top))
8998 case MULT_EXPR:
8999 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9000 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9002 case PLUS_EXPR:
9003 case MINUS_EXPR:
9004 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9005 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9007 case LSHIFT_EXPR:
9008 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9010 tree op1, t1;
9012 op1 = TREE_OPERAND (top, 1);
9013 /* const_binop may not detect overflow correctly,
9014 so check for it explicitly here. */
9015 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9016 > TREE_INT_CST_LOW (op1)
9017 && TREE_INT_CST_HIGH (op1) == 0
9018 && 0 != (t1 = fold_convert (type,
9019 const_binop (LSHIFT_EXPR,
9020 size_one_node,
9021 op1, 0)))
9022 && ! TREE_OVERFLOW (t1))
9023 return multiple_of_p (type, t1, bottom);
9025 return 0;
9027 case NOP_EXPR:
9028 /* Can't handle conversions from non-integral or wider integral type. */
9029 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9030 || (TYPE_PRECISION (type)
9031 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9032 return 0;
9034 /* ... fall through ... */
9036 case SAVE_EXPR:
9037 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9039 case INTEGER_CST:
9040 if (TREE_CODE (bottom) != INTEGER_CST
9041 || (TYPE_UNSIGNED (type)
9042 && (tree_int_cst_sgn (top) < 0
9043 || tree_int_cst_sgn (bottom) < 0)))
9044 return 0;
9045 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9046 top, bottom, 0));
9048 default:
9049 return 0;
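/* Illustrative sketch, not compiled in: querying multiple_of_p.  For
   TOP = SAVE_EXPR (I) * SAVE_EXPR (J * 8) and BOTTOM = 8, the
   MULT_EXPR case tries each factor, the SAVE_EXPR case looks inside
   to J * 8, and the INTEGER_CST case verifies 8 % 8 == 0, so the
   whole product is known to be a multiple of 8.  The helper name is
   hypothetical.  */
#if 0
static int
example_multiple_of_8 (tree top)
{
  /* size_int builds the INTEGER_CST 8 with type `sizetype'.  */
  return multiple_of_p (sizetype, top, size_int (8));
}
#endif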
9053 /* Return true if `t' is known to be non-negative. */
9055 int
9056 tree_expr_nonnegative_p (tree t)
9058 switch (TREE_CODE (t))
9060 case ABS_EXPR:
9061 return 1;
9063 case INTEGER_CST:
9064 return tree_int_cst_sgn (t) >= 0;
9066 case REAL_CST:
9067 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9069 case PLUS_EXPR:
9070 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9071 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9072 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9074 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9075 both unsigned and at least 2 bits shorter than the result. */
9076 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9077 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9078 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9080 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9081 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9082 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9083 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9085 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9086 TYPE_PRECISION (inner2)) + 1;
9087 return prec < TYPE_PRECISION (TREE_TYPE (t));
9090 break;
9092 case MULT_EXPR:
9093 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9095 /* x * x for floating point x is always non-negative. */
9096 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9097 return 1;
9098 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9099 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9102 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9103 both unsigned and their combined precision is smaller than that of the result. */
9104 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9105 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9106 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9108 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9109 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9110 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9111 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9112 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9113 < TYPE_PRECISION (TREE_TYPE (t));
9115 return 0;
9117 case TRUNC_DIV_EXPR:
9118 case CEIL_DIV_EXPR:
9119 case FLOOR_DIV_EXPR:
9120 case ROUND_DIV_EXPR:
9121 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9122 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9124 case TRUNC_MOD_EXPR:
9125 case CEIL_MOD_EXPR:
9126 case FLOOR_MOD_EXPR:
9127 case ROUND_MOD_EXPR:
9128 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9130 case RDIV_EXPR:
9131 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9132 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9134 case BIT_AND_EXPR:
9135 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9136 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9137 case BIT_IOR_EXPR:
9138 case BIT_XOR_EXPR:
9139 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9140 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9142 case NOP_EXPR:
9144 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9145 tree outer_type = TREE_TYPE (t);
9147 if (TREE_CODE (outer_type) == REAL_TYPE)
9149 if (TREE_CODE (inner_type) == REAL_TYPE)
9150 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9151 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9153 if (TYPE_UNSIGNED (inner_type))
9154 return 1;
9155 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9158 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9160 if (TREE_CODE (inner_type) == REAL_TYPE)
9161 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9162 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9163 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9164 && TYPE_UNSIGNED (inner_type);
9167 break;
9169 case COND_EXPR:
9170 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9171 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9172 case COMPOUND_EXPR:
9173 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9174 case MIN_EXPR:
9175 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9176 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9177 case MAX_EXPR:
9178 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9179 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9180 case MODIFY_EXPR:
9181 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9182 case BIND_EXPR:
9183 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9184 case SAVE_EXPR:
9185 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9186 case NON_LVALUE_EXPR:
9187 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9188 case FLOAT_EXPR:
9189 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9190 case RTL_EXPR:
9191 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
9193 case CALL_EXPR:
9195 tree fndecl = get_callee_fndecl (t);
9196 tree arglist = TREE_OPERAND (t, 1);
9197 if (fndecl
9198 && DECL_BUILT_IN (fndecl)
9199 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9200 switch (DECL_FUNCTION_CODE (fndecl))
9202 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9203 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9204 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9205 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9207 CASE_BUILTIN_F (BUILT_IN_ACOS)
9208 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9209 CASE_BUILTIN_F (BUILT_IN_CABS)
9210 CASE_BUILTIN_F (BUILT_IN_COSH)
9211 CASE_BUILTIN_F (BUILT_IN_ERFC)
9212 CASE_BUILTIN_F (BUILT_IN_EXP)
9213 CASE_BUILTIN_F (BUILT_IN_EXP10)
9214 CASE_BUILTIN_F (BUILT_IN_EXP2)
9215 CASE_BUILTIN_F (BUILT_IN_FABS)
9216 CASE_BUILTIN_F (BUILT_IN_FDIM)
9217 CASE_BUILTIN_F (BUILT_IN_FREXP)
9218 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9219 CASE_BUILTIN_F (BUILT_IN_POW10)
9220 CASE_BUILTIN_I (BUILT_IN_FFS)
9221 CASE_BUILTIN_I (BUILT_IN_PARITY)
9222 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9223 /* Always true. */
9224 return 1;
9226 CASE_BUILTIN_F (BUILT_IN_SQRT)
9227 /* sqrt(-0.0) is -0.0. */
9228 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9229 return 1;
9230 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9232 CASE_BUILTIN_F (BUILT_IN_ASINH)
9233 CASE_BUILTIN_F (BUILT_IN_ATAN)
9234 CASE_BUILTIN_F (BUILT_IN_ATANH)
9235 CASE_BUILTIN_F (BUILT_IN_CBRT)
9236 CASE_BUILTIN_F (BUILT_IN_CEIL)
9237 CASE_BUILTIN_F (BUILT_IN_ERF)
9238 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9239 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9240 CASE_BUILTIN_F (BUILT_IN_FMOD)
9241 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9242 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9243 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9244 CASE_BUILTIN_F (BUILT_IN_LRINT)
9245 CASE_BUILTIN_F (BUILT_IN_LROUND)
9246 CASE_BUILTIN_F (BUILT_IN_MODF)
9247 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9248 CASE_BUILTIN_F (BUILT_IN_POW)
9249 CASE_BUILTIN_F (BUILT_IN_RINT)
9250 CASE_BUILTIN_F (BUILT_IN_ROUND)
9251 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9252 CASE_BUILTIN_F (BUILT_IN_SINH)
9253 CASE_BUILTIN_F (BUILT_IN_TANH)
9254 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9255 /* True if the 1st argument is nonnegative. */
9256 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9258 CASE_BUILTIN_F (BUILT_IN_FMAX)
9259 /* True if either the 1st or the 2nd argument is nonnegative. */
9260 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9261 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9263 CASE_BUILTIN_F (BUILT_IN_FMIN)
9264 /* True if both the 1st and the 2nd arguments are nonnegative. */
9265 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9266 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9268 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9269 /* True if the 2nd argument is nonnegative. */
9270 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9272 default:
9273 break;
9274 #undef CASE_BUILTIN_F
9275 #undef CASE_BUILTIN_I
9279 /* ... fall through ... */
9281 default:
9282 if (truth_value_p (TREE_CODE (t)))
9283 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9284 return 1;
9287 /* We don't know the sign of `t', so be conservative and return false. */
9288 return 0;
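/* Illustrative sketch, not compiled in: the PLUS_EXPR rule above in
   action.  For (int) (unsigned char) x + (int) (unsigned char) y the
   inner types are both unsigned with precision 8, and 8 + 1 = 9 is
   less than the precision of int, so the sum cannot wrap and the
   whole expression is known non-negative.  UC_X and UC_Y are assumed
   to be unsigned char expressions; the helper name is hypothetical.  */
#if 0
static int
example_widened_sum_nonnegative (tree uc_x, tree uc_y)
{
  tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                           fold_convert (integer_type_node, uc_x),
                           fold_convert (integer_type_node, uc_y)));
  return tree_expr_nonnegative_p (sum);
}
#endif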
9291 /* Return true when T is an address and is known to be nonzero.
9292 For floating point we further ensure that T is not denormal.
9293 Similar logic is present in nonzero_address in rtlanal.h */
9295 static bool
9296 tree_expr_nonzero_p (tree t)
9298 tree type = TREE_TYPE (t);
9300 /* Doing something useful for floating point would need more work. */
9301 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9302 return false;
9304 switch (TREE_CODE (t))
9306 case ABS_EXPR:
9307 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9308 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9310 case INTEGER_CST:
9311 return !integer_zerop (t);
9313 case PLUS_EXPR:
9314 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9316 /* In the presence of negative values it is hard
9317 to say anything definite. */
9318 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9319 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9320 return false;
9321 /* One of the operands must be positive and the other non-negative. */
9322 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9323 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9325 break;
9327 case MULT_EXPR:
9328 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9330 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9331 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9333 break;
9335 case NOP_EXPR:
9337 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9338 tree outer_type = TREE_TYPE (t);
9340 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9341 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9343 break;
9345 case ADDR_EXPR:
9346 /* Weak declarations may link to NULL. */
9347 if (DECL_P (TREE_OPERAND (t, 0)))
9348 return !DECL_WEAK (TREE_OPERAND (t, 0));
9349 /* Constants and all other cases are never weak. */
9350 return true;
9352 case COND_EXPR:
9353 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9354 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9356 case MIN_EXPR:
9357 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9358 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9360 case MAX_EXPR:
9361 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9363 /* When both operands are nonzero, then MAX must be too. */
9364 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9365 return true;
9367 /* MAX where operand 0 is positive is positive. */
9368 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9370 /* MAX where operand 1 is positive is positive. */
9371 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9372 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9373 return true;
9374 break;
9376 case COMPOUND_EXPR:
9377 case MODIFY_EXPR:
9378 case BIND_EXPR:
9379 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9381 case SAVE_EXPR:
9382 case NON_LVALUE_EXPR:
9383 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9385 case BIT_IOR_EXPR:
9386 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9387 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9389 default:
9390 break;
9392 return false;
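/* Illustrative sketch, not compiled in: the ADDR_EXPR rule above.
   Taking the address of an ordinary declaration yields a nonzero
   pointer, but a weak declaration may link to NULL, so only non-weak
   decls qualify.  DECL is assumed to be a VAR_DECL or FUNCTION_DECL;
   the helper name is hypothetical.  */
#if 0
static bool
example_address_nonzero (tree decl)
{
  return tree_expr_nonzero_p (build_fold_addr_expr (decl));
}
#endif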
9395 /* Return true if `r' is known to be non-negative.
9396 Only handles constants at the moment. */
9398 int
9399 rtl_expr_nonnegative_p (rtx r)
9401 switch (GET_CODE (r))
9403 case CONST_INT:
9404 return INTVAL (r) >= 0;
9406 case CONST_DOUBLE:
9407 if (GET_MODE (r) == VOIDmode)
9408 return CONST_DOUBLE_HIGH (r) >= 0;
9409 return 0;
9411 case CONST_VECTOR:
9413 int units, i;
9414 rtx elt;
9416 units = CONST_VECTOR_NUNITS (r);
9418 for (i = 0; i < units; ++i)
9420 elt = CONST_VECTOR_ELT (r, i);
9421 if (!rtl_expr_nonnegative_p (elt))
9422 return 0;
9425 return 1;
9428 case SYMBOL_REF:
9429 case LABEL_REF:
9430 /* These are always nonnegative. */
9431 return 1;
9433 default:
9434 return 0;
9439 /* See if we are applying CODE, a relational operator, to the highest or
9440 lowest possible integer of TYPE. If so, then the result is a compile
9441 time constant. */
9443 static tree
9444 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9445 tree *op1_p)
9447 tree op0 = *op0_p;
9448 tree op1 = *op1_p;
9449 enum tree_code code = *code_p;
9450 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9452 if (TREE_CODE (op1) == INTEGER_CST
9453 && ! TREE_CONSTANT_OVERFLOW (op1)
9454 && width <= HOST_BITS_PER_WIDE_INT
9455 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9456 || POINTER_TYPE_P (TREE_TYPE (op1))))
9458 unsigned HOST_WIDE_INT signed_max;
9459 unsigned HOST_WIDE_INT max, min;
9461 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9463 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9465 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9466 min = 0;
9468 else
9470 max = signed_max;
9471 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9474 if (TREE_INT_CST_HIGH (op1) == 0
9475 && TREE_INT_CST_LOW (op1) == max)
9476 switch (code)
9478 case GT_EXPR:
9479 return omit_one_operand (type, integer_zero_node, op0);
9481 case GE_EXPR:
9482 *code_p = EQ_EXPR;
9483 break;
9484 case LE_EXPR:
9485 return omit_one_operand (type, integer_one_node, op0);
9487 case LT_EXPR:
9488 *code_p = NE_EXPR;
9489 break;
9491 /* The GE_EXPR and LT_EXPR cases above are not normally
9492 reached because of previous transformations. */
9494 default:
9495 break;
9497 else if (TREE_INT_CST_HIGH (op1) == 0
9498 && TREE_INT_CST_LOW (op1) == max - 1)
9499 switch (code)
9501 case GT_EXPR:
9502 *code_p = EQ_EXPR;
9503 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9504 break;
9505 case LE_EXPR:
9506 *code_p = NE_EXPR;
9507 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9508 break;
9509 default:
9510 break;
9512 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9513 && TREE_INT_CST_LOW (op1) == min)
9514 switch (code)
9516 case LT_EXPR:
9517 return omit_one_operand (type, integer_zero_node, op0);
9519 case LE_EXPR:
9520 *code_p = EQ_EXPR;
9521 break;
9523 case GE_EXPR:
9524 return omit_one_operand (type, integer_one_node, op0);
9526 case GT_EXPR:
9527 *code_p = NE_EXPR;
9528 break;
9530 default:
9531 break;
9533 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9534 && TREE_INT_CST_LOW (op1) == min + 1)
9535 switch (code)
9537 case GE_EXPR:
9538 *code_p = NE_EXPR;
9539 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9540 break;
9541 case LT_EXPR:
9542 *code_p = EQ_EXPR;
9543 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9544 break;
9545 default:
9546 break;
9549 else if (TREE_INT_CST_HIGH (op1) == 0
9550 && TREE_INT_CST_LOW (op1) == signed_max
9551 && TYPE_UNSIGNED (TREE_TYPE (op1))
9552 /* signed_type does not work on pointer types. */
9553 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9555 /* The following case also applies to X < signed_max+1
9556 and X >= signed_max+1 because of previous transformations. */
9557 if (code == LE_EXPR || code == GT_EXPR)
9559 tree st0, st1, exp, retval;
9560 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9561 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9563 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9564 type,
9565 fold_convert (st0, op0),
9566 fold_convert (st1, integer_zero_node));
9568 retval
9569 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9570 TREE_TYPE (exp),
9571 TREE_OPERAND (exp, 0),
9572 TREE_OPERAND (exp, 1));
9574 /* If we are in gimple form, then returning EXP would create
9575 non-gimple expressions. Clearing it is safe and ensures
9576 we do not allow a non-gimple expression to escape. */
9577 if (in_gimple_form)
9578 exp = NULL;
9580 return (retval ? retval : exp);
9585 return NULL_TREE;
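/* Illustrative sketch, not compiled in: comparing an unsigned char
   against its maximum value 255.  X > 255 folds to constant 0 and
   X <= 255 to constant 1, while X >= 255 is rewritten to X == 255
   (code becomes EQ_EXPR and NULL_TREE is returned, since that result
   is not a compile time constant).  unsigned_char_type_node is
   assumed available as in the C family front ends; the helper name
   is hypothetical.  */
#if 0
static tree
example_compare_uchar_max (tree uc_x)
{
  enum tree_code code = GT_EXPR;
  tree op0 = uc_x;
  tree op1 = build_int_2 (255, 0);
  TREE_TYPE (op1) = unsigned_char_type_node;
  /* Returns omit_one_operand (..., integer_zero_node, ...), i.e.
     constant false, wrapped so that UC_X is still evaluated.  */
  return fold_relational_hi_lo (&code, integer_type_node, &op0, &op1);
}
#endif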
9589 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9590 attempt to fold the expression to a constant without modifying TYPE,
9591 OP0 or OP1.
9593 If the expression could be simplified to a constant, then return
9594 the constant. If the expression would not be simplified to a
9595 constant, then return NULL_TREE.
9597 Note this is primarily designed to be called after gimplification
9598 of the tree structures and when at least one operand is a constant.
9599 As a result of those simplifying assumptions this routine is far
9600 simpler than the generic fold routine. */
9602 tree
9603 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9604 tree op0, tree op1)
9606 int wins = 1;
9607 tree subop0;
9608 tree subop1;
9609 tree tem;
9611 /* If this is a commutative operation, and OP0 is a constant, move it
9612 to OP1 to reduce the number of tests below. */
9613 if (commutative_tree_code (code)
9614 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9616 tem = op0;
9617 op0 = op1;
9618 op1 = tem;
9621 /* If either operand is a complex type, extract its real component. */
9622 if (TREE_CODE (op0) == COMPLEX_CST)
9623 subop0 = TREE_REALPART (op0);
9624 else
9625 subop0 = op0;
9627 if (TREE_CODE (op1) == COMPLEX_CST)
9628 subop1 = TREE_REALPART (op1);
9629 else
9630 subop1 = op1;
9632 /* Note if either argument is not a real or integer constant.
9633 With a few exceptions, simplification is limited to cases
9634 where both arguments are constants. */
9635 if ((TREE_CODE (subop0) != INTEGER_CST
9636 && TREE_CODE (subop0) != REAL_CST)
9637 || (TREE_CODE (subop1) != INTEGER_CST
9638 && TREE_CODE (subop1) != REAL_CST))
9639 wins = 0;
9641 switch (code)
9643 case PLUS_EXPR:
9644 /* (plus (address) (const_int)) is a constant. */
9645 if (TREE_CODE (op0) == PLUS_EXPR
9646 && TREE_CODE (op1) == INTEGER_CST
9647 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9648 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9649 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9650 == ADDR_EXPR)))
9651 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9653 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9654 const_binop (PLUS_EXPR, op1,
9655 TREE_OPERAND (op0, 1), 0));
9657 case BIT_XOR_EXPR:
9659 binary:
9660 if (!wins)
9661 return NULL_TREE;
9663 /* Both arguments are constants. Simplify. */
9664 tem = const_binop (code, op0, op1, 0);
9665 if (tem != NULL_TREE)
9667 /* The return value should always have the same type as
9668 the original expression. */
9669 if (TREE_TYPE (tem) != type)
9670 tem = fold_convert (type, tem);
9672 return tem;
9674 return NULL_TREE;
9676 case MINUS_EXPR:
9677 /* Fold &x - &x. This can happen from &x.foo - &x.
9678 This is unsafe for certain floats even in non-IEEE formats.
9679 In IEEE, it is unsafe because it does wrong for NaNs.
9680 Also note that operand_equal_p is always false if an
9681 operand is volatile. */
9682 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9683 return fold_convert (type, integer_zero_node);
9685 goto binary;
9687 case MULT_EXPR:
9688 case BIT_AND_EXPR:
9689 /* Special case multiplication or bitwise AND where one argument
9690 is zero. */
9691 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9692 return omit_one_operand (type, op1, op0);
9693 else
9694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9695 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9696 && real_zerop (op1))
9697 return omit_one_operand (type, op1, op0);
9699 goto binary;
9701 case BIT_IOR_EXPR:
9702 /* Special case when we know the result will be all ones. */
9703 if (integer_all_onesp (op1))
9704 return omit_one_operand (type, op1, op0);
9706 goto binary;
9708 case TRUNC_DIV_EXPR:
9709 case ROUND_DIV_EXPR:
9710 case FLOOR_DIV_EXPR:
9711 case CEIL_DIV_EXPR:
9712 case EXACT_DIV_EXPR:
9713 case TRUNC_MOD_EXPR:
9714 case ROUND_MOD_EXPR:
9715 case FLOOR_MOD_EXPR:
9716 case CEIL_MOD_EXPR:
9717 case RDIV_EXPR:
9718 /* Division by zero is undefined. */
9719 if (integer_zerop (op1))
9720 return NULL_TREE;
9722 if (TREE_CODE (op1) == REAL_CST
9723 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9724 && real_zerop (op1))
9725 return NULL_TREE;
9727 goto binary;
9729 case MIN_EXPR:
9730 if (INTEGRAL_TYPE_P (type)
9731 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9732 return omit_one_operand (type, op1, op0);
9734 goto binary;
9736 case MAX_EXPR:
9737 if (INTEGRAL_TYPE_P (type)
9738 && TYPE_MAX_VALUE (type)
9739 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9740 return omit_one_operand (type, op1, op0);
9742 goto binary;
9744 case RSHIFT_EXPR:
9745 /* Optimize -1 >> x for arithmetic right shifts. */
9746 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9747 return omit_one_operand (type, op0, op1);
9748 /* ... fall through ... */
9750 case LSHIFT_EXPR:
9751 if (integer_zerop (op0))
9752 return omit_one_operand (type, op0, op1);
9754 /* Since a negative shift count is not well-defined, don't
9755 try to compute it in the compiler. */
9756 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9757 return NULL_TREE;
9759 goto binary;
9761 case LROTATE_EXPR:
9762 case RROTATE_EXPR:
9763 /* -1 rotated either direction by any amount is still -1. */
9764 if (integer_all_onesp (op0))
9765 return omit_one_operand (type, op0, op1);
9767 /* 0 rotated either direction by any amount is still zero. */
9768 if (integer_zerop (op0))
9769 return omit_one_operand (type, op0, op1);
9771 goto binary;
9773 case COMPLEX_EXPR:
9774 if (wins)
9775 return build_complex (type, op0, op1);
9776 return NULL_TREE;
9778 case LT_EXPR:
9779 case LE_EXPR:
9780 case GT_EXPR:
9781 case GE_EXPR:
9782 case EQ_EXPR:
9783 case NE_EXPR:
9784 /* If one arg is a real or integer constant, put it last. */
9785 if ((TREE_CODE (op0) == INTEGER_CST
9786 && TREE_CODE (op1) != INTEGER_CST)
9787 || (TREE_CODE (op0) == REAL_CST
9788 && TREE_CODE (op1) != REAL_CST))
9790 tree temp;
9792 temp = op0;
9793 op0 = op1;
9794 op1 = temp;
9795 code = swap_tree_comparison (code);
9798 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9799 This transformation affects the cases which are handled in later
9800 optimizations involving comparisons with non-negative constants. */
9801 if (TREE_CODE (op1) == INTEGER_CST
9802 && TREE_CODE (op0) != INTEGER_CST
9803 && tree_int_cst_sgn (op1) > 0)
9805 switch (code)
9807 case GE_EXPR:
9808 code = GT_EXPR;
9809 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9810 break;
9812 case LT_EXPR:
9813 code = LE_EXPR;
9814 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9815 break;
9817 default:
9818 break;
9822 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9823 if (tem)
9824 return tem;
9826 /* Fall through. */
9828 case ORDERED_EXPR:
9829 case UNORDERED_EXPR:
9830 case UNLT_EXPR:
9831 case UNLE_EXPR:
9832 case UNGT_EXPR:
9833 case UNGE_EXPR:
9834 case UNEQ_EXPR:
9835 case LTGT_EXPR:
9836 if (!wins)
9837 return NULL_TREE;
9839 return fold_relational_const (code, type, op0, op1);
9841 case RANGE_EXPR:
9842 /* This could probably be handled. */
9843 return NULL_TREE;
9845 case TRUTH_AND_EXPR:
9846 /* If second arg is constant zero, result is zero, but first arg
9847 must be evaluated. */
9848 if (integer_zerop (op1))
9849 return omit_one_operand (type, op1, op0);
9850 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9851 case will be handled here. */
9852 if (integer_zerop (op0))
9853 return omit_one_operand (type, op0, op1);
9854 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9855 return constant_boolean_node (true, type);
9856 return NULL_TREE;
9858 case TRUTH_OR_EXPR:
9859 /* If second arg is constant true, result is true, but we must
9860 evaluate first arg. */
9861 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9862 return omit_one_operand (type, op1, op0);
9863 /* Likewise for first arg, but note this only occurs here for
9864 TRUTH_OR_EXPR. */
9865 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9866 return omit_one_operand (type, op0, op1);
9867 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9868 return constant_boolean_node (false, type);
9869 return NULL_TREE;
9871 case TRUTH_XOR_EXPR:
9872 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9874 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
9875 return constant_boolean_node (x, type);
9877 return NULL_TREE;
9879 default:
9880 return NULL_TREE;
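/* Illustrative sketch, not compiled in: folding 3 + 4 to 7 without
   modifying either operand tree.  If one operand were not constant,
   NULL_TREE would be returned instead.  The helper name is
   hypothetical.  */
#if 0
static tree
example_fold_constant_plus (void)
{
  tree op0 = build_int_2 (3, 0);
  tree op1 = build_int_2 (4, 0);
  TREE_TYPE (op0) = integer_type_node;
  TREE_TYPE (op1) = integer_type_node;
  /* The PLUS_EXPR case falls through to the `binary' label, where
     const_binop performs the constant addition.  */
  return nondestructive_fold_binary_to_constant (PLUS_EXPR,
                                                 integer_type_node,
                                                 op0, op1);
}
#endif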
9884 /* Given the components of a unary expression CODE, TYPE and OP0,
9885 attempt to fold the expression to a constant without modifying
9886 TYPE or OP0.
9888 If the expression could be simplified to a constant, then return
9889 the constant. If the expression would not be simplified to a
9890 constant, then return NULL_TREE.
9892 Note this is primarily designed to be called after gimplification
9893 of the tree structures and when op0 is a constant. As a result
9894 of those simplifying assumptions this routine is far simpler than
9895 the generic fold routine. */
9897 tree
9898 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
9899 tree op0)
9901 /* Make sure we have a suitable constant argument. */
9902 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
9904 tree subop;
9906 if (TREE_CODE (op0) == COMPLEX_CST)
9907 subop = TREE_REALPART (op0);
9908 else
9909 subop = op0;
9911 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
9912 return NULL_TREE;
9915 switch (code)
9917 case NOP_EXPR:
9918 case FLOAT_EXPR:
9919 case CONVERT_EXPR:
9920 case FIX_TRUNC_EXPR:
9921 case FIX_FLOOR_EXPR:
9922 case FIX_CEIL_EXPR:
9923 return fold_convert_const (code, type, op0);
9925 case NEGATE_EXPR:
9926 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9927 return fold_negate_const (op0, type);
9928 else
9929 return NULL_TREE;
9931 case ABS_EXPR:
9932 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9933 return fold_abs_const (op0, type);
9934 else
9935 return NULL_TREE;
9937 case BIT_NOT_EXPR:
9938 if (TREE_CODE (op0) == INTEGER_CST)
9939 return fold_not_const (op0, type);
9940 else
9941 return NULL_TREE;
9943 case REALPART_EXPR:
9944 if (TREE_CODE (op0) == COMPLEX_CST)
9945 return TREE_REALPART (op0);
9946 else
9947 return NULL_TREE;
9949 case IMAGPART_EXPR:
9950 if (TREE_CODE (op0) == COMPLEX_CST)
9951 return TREE_IMAGPART (op0);
9952 else
9953 return NULL_TREE;
9955 case CONJ_EXPR:
9956 if (TREE_CODE (op0) == COMPLEX_CST
9957 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
9958 return build_complex (type, TREE_REALPART (op0),
9959 negate_expr (TREE_IMAGPART (op0)));
9960 return NULL_TREE;
9962 default:
9963 return NULL_TREE;
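/* Illustrative sketch, not compiled in: constant negation.  The
   NEGATE_EXPR case hands the INTEGER_CST 5 to fold_negate_const,
   which builds a fresh constant -5; OP0 itself is never modified.
   The helper name is hypothetical.  */
#if 0
static tree
example_fold_constant_negate (void)
{
  tree op0 = build_int_2 (5, 0);
  TREE_TYPE (op0) = integer_type_node;
  return nondestructive_fold_unary_to_constant (NEGATE_EXPR,
                                                integer_type_node, op0);
}
#endif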
9967 /* If EXP represents referencing an element in a constant string
9968 (either via pointer arithmetic or array indexing), return the
9969 tree representing the value accessed, otherwise return NULL. */
9971 tree
9972 fold_read_from_constant_string (tree exp)
9974 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
9976 tree exp1 = TREE_OPERAND (exp, 0);
9977 tree index;
9978 tree string;
9980 if (TREE_CODE (exp) == INDIRECT_REF)
9982 string = string_constant (exp1, &index);
9984 else
9986 tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
9987 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
9988 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
9990 /* Optimize the special-case of a zero lower bound.
9992 We convert the low_bound to sizetype to avoid some problems
9993 with constant folding. (E.g. suppose the lower bound is 1,
9994 and its mode is QI. Without the conversion, (ARRAY
9995 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9996 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9997 if (! integer_zerop (low_bound))
9998 index = size_diffop (index, fold_convert (sizetype, low_bound));
10000 string = exp1;
10003 if (string
10004 && TREE_CODE (string) == STRING_CST
10005 && TREE_CODE (index) == INTEGER_CST
10006 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10007 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10008 == MODE_INT)
10009 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10010 return fold_convert (TREE_TYPE (exp),
10011 build_int_2 ((TREE_STRING_POINTER (string)
10012 [TREE_INT_CST_LOW (index)]), 0));
10014 return NULL;
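/* Illustrative sketch, not compiled in: EXP is assumed to be an
   ARRAY_REF such as "abc"[1], i.e. an index into a STRING_CST with a
   constant INTEGER_CST index.  The call then returns the character
   constant 'b' converted to TREE_TYPE (exp).  The helper name is
   hypothetical.  */
#if 0
static tree
example_read_string_char (tree exp)
{
  /* NULL is returned when EXP is not a reference into a constant
     string, e.g. if the index is not constant or is out of range.  */
  return fold_read_from_constant_string (exp);
}
#endif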
10017 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10018 an integer constant or real constant.
10020 TYPE is the type of the result. */
10022 static tree
10023 fold_negate_const (tree arg0, tree type)
10025 tree t = NULL_TREE;
10027 if (TREE_CODE (arg0) == INTEGER_CST)
10029 unsigned HOST_WIDE_INT low;
10030 HOST_WIDE_INT high;
10031 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10032 TREE_INT_CST_HIGH (arg0),
10033 &low, &high);
10034 t = build_int_2 (low, high);
10035 TREE_TYPE (t) = type;
10036 TREE_OVERFLOW (t)
10037 = (TREE_OVERFLOW (arg0)
10038 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10039 TREE_CONSTANT_OVERFLOW (t)
10040 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10042 else if (TREE_CODE (arg0) == REAL_CST)
10043 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10044 #ifdef ENABLE_CHECKING
10045 else
10046 abort ();
10047 #endif
10049 return t;
10052 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10053 an integer constant or real constant.
10055 TYPE is the type of the result. */
10057 tree
10058 fold_abs_const (tree arg0, tree type)
10060 tree t = NULL_TREE;
10062 if (TREE_CODE (arg0) == INTEGER_CST)
10064 /* If the value is unsigned, then the absolute value is
10065 the same as the ordinary value. */
10066 if (TYPE_UNSIGNED (type))
10067 return arg0;
10068 /* Similarly, if the value is non-negative. */
10069 else if (INT_CST_LT (integer_minus_one_node, arg0))
10070 return arg0;
10071 /* If the value is negative, then the absolute value is
10072 its negation. */
10073 else
10075 unsigned HOST_WIDE_INT low;
10076 HOST_WIDE_INT high;
10077 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10078 TREE_INT_CST_HIGH (arg0),
10079 &low, &high);
10080 t = build_int_2 (low, high);
10081 TREE_TYPE (t) = type;
10082 TREE_OVERFLOW (t)
10083 = (TREE_OVERFLOW (arg0)
10084 | force_fit_type (t, overflow));
10085 TREE_CONSTANT_OVERFLOW (t)
10086 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10087 return t;
10090 else if (TREE_CODE (arg0) == REAL_CST)
10092 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10093 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10094 else
10095 return arg0;
10097 #ifdef ENABLE_CHECKING
10098 else
10099 abort ();
10100 #endif
10102 return t;
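/* Illustrative sketch, not compiled in: abs (-5) at compile time.
   Build the INTEGER_CST -5, then let fold_abs_const negate it back
   to 5 via neg_double.  The helper name is hypothetical.  */
#if 0
static tree
example_abs_of_minus_five (void)
{
  tree arg = build_int_2 (-5, -1);
  TREE_TYPE (arg) = integer_type_node;
  force_fit_type (arg, 0);
  return fold_abs_const (arg, integer_type_node);
}
#endif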
10105 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10106 constant. TYPE is the type of the result. */
10108 static tree
10109 fold_not_const (tree arg0, tree type)
10111 tree t = NULL_TREE;
10113 if (TREE_CODE (arg0) == INTEGER_CST)
10115 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10116 ~ TREE_INT_CST_HIGH (arg0));
10117 TREE_TYPE (t) = type;
10118 force_fit_type (t, 0);
10119 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10120 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10122 #ifdef ENABLE_CHECKING
10123 else
10124 abort ();
10125 #endif
10127 return t;
10130 /* Given CODE, a relational operator, the target type, TYPE and two
10131 constant operands OP0 and OP1, return the result of the
10132 relational operation. If the result is not a compile time
10133 constant, then return NULL_TREE. */
10135 static tree
10136 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10138 int result, invert;
10140 /* From here on, the only cases we handle are when the result is
10141 known to be a constant. */
10143 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10145 /* Handle the cases where either operand is a NaN. */
10146 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10147 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
10149 switch (code)
10151 case EQ_EXPR:
10152 case ORDERED_EXPR:
10153 result = 0;
10154 break;
10156 case NE_EXPR:
10157 case UNORDERED_EXPR:
10158 case UNLT_EXPR:
10159 case UNLE_EXPR:
10160 case UNGT_EXPR:
10161 case UNGE_EXPR:
10162 case UNEQ_EXPR:
10163 result = 1;
10164 break;
10166 case LT_EXPR:
10167 case LE_EXPR:
10168 case GT_EXPR:
10169 case GE_EXPR:
10170 case LTGT_EXPR:
10171 if (flag_trapping_math)
10172 return NULL_TREE;
10173 result = 0;
10174 break;
10176 default:
10177 abort ();
10180 return constant_boolean_node (result, type);
10183 /* From here on we're sure there are no NaNs. */
10184 switch (code)
10186 case ORDERED_EXPR:
10187 return constant_boolean_node (true, type);
10189 case UNORDERED_EXPR:
10190 return constant_boolean_node (false, type);
10192 case UNLT_EXPR:
10193 code = LT_EXPR;
10194 break;
10195 case UNLE_EXPR:
10196 code = LE_EXPR;
10197 break;
10198 case UNGT_EXPR:
10199 code = GT_EXPR;
10200 break;
10201 case UNGE_EXPR:
10202 code = GE_EXPR;
10203 break;
10204 case UNEQ_EXPR:
10205 code = EQ_EXPR;
10206 break;
10207 case LTGT_EXPR:
10208 code = NE_EXPR;
10209 break;
10211 default:
10212 break;
10216 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10218 To compute GT, swap the arguments and do LT.
10219 To compute GE, do LT and invert the result.
10220 To compute LE, swap the arguments, do LT and invert the result.
10221 To compute NE, do EQ and invert the result.
10223 Therefore, the code below must handle only EQ and LT. */
10225 if (code == LE_EXPR || code == GT_EXPR)
10227 tree tem = op0;
10228 op0 = op1;
10229 op1 = tem;
10230 code = swap_tree_comparison (code);
10233 /* Note that it is safe to invert for real values here because we
10234 have already handled the one case where it matters. */
10236 invert = 0;
10237 if (code == NE_EXPR || code == GE_EXPR)
10239 invert = 1;
10240 code = invert_tree_comparison (code, false);
10243 /* Compute a result for LT or EQ if args permit;
10244 otherwise return NULL_TREE. */
10245 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10247 if (code == EQ_EXPR)
10248 result = tree_int_cst_equal (op0, op1);
10249 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10250 result = INT_CST_LT_UNSIGNED (op0, op1);
10251 else
10252 result = INT_CST_LT (op0, op1);
10255 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10256 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10257 result = 0;
10259 /* Two real constants can be compared explicitly. */
10260 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10262 if (code == EQ_EXPR)
10263 result = REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10264 TREE_REAL_CST (op1));
10265 else
10266 result = REAL_VALUES_LESS (TREE_REAL_CST (op0),
10267 TREE_REAL_CST (op1));
10269 else
10270 return NULL_TREE;
10272 if (invert)
10273 result ^= 1;
10274 return constant_boolean_node (result, type);
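/* Worked cases for the NaN handling above (illustrative only): with
   OP0 or OP1 a NaN,
     OP0 == OP1 and ORDERED (OP0, OP1)    fold to 0,
     OP0 != OP1 and UNORDERED (OP0, OP1)  fold to 1,
   while the ordered comparisons <, <=, >, >= and LTGT fold to 0 only
   when !flag_trapping_math, because at run time they may raise an
   invalid-operand exception that must then be preserved.  */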
10277 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10278 avoid confusing the gimplify process. */
10280 tree
10281 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10283 if (TREE_CODE (t) == INDIRECT_REF)
10285 t = TREE_OPERAND (t, 0);
10286 if (TREE_TYPE (t) != ptrtype)
10287 t = build1 (NOP_EXPR, ptrtype, t);
10289 else
10291 tree base = t;
10292 while (TREE_CODE (base) == COMPONENT_REF
10293 || TREE_CODE (base) == ARRAY_REF)
10294 base = TREE_OPERAND (base, 0);
10295 if (DECL_P (base))
10296 TREE_ADDRESSABLE (base) = 1;
10298 t = build1 (ADDR_EXPR, ptrtype, t);
10301 return t;
10304 tree
10305 build_fold_addr_expr (tree t)
10307 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10310 /* Builds an expression for an indirection through T, simplifying some
10311 cases. */
10313 tree
10314 build_fold_indirect_ref (tree t)
10316 tree type = TREE_TYPE (TREE_TYPE (t));
10317 tree sub = t;
10318 tree subtype;
10320 STRIP_NOPS (sub);
10321 if (TREE_CODE (sub) == ADDR_EXPR)
10323 tree op = TREE_OPERAND (sub, 0);
10324 tree optype = TREE_TYPE (op);
10325 /* *&p => p */
10326 if (lang_hooks.types_compatible_p (type, optype))
10327 return op;
10328 /* *(foo *)&fooarray => fooarray[0] */
10329 else if (TREE_CODE (optype) == ARRAY_TYPE
10330 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10331 return build2 (ARRAY_REF, type, op, size_zero_node);
10334 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10335 subtype = TREE_TYPE (sub);
10336 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10337 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10339 sub = build_fold_indirect_ref (sub);
10340 return build2 (ARRAY_REF, type, sub, size_zero_node);
10343 return build1 (INDIRECT_REF, type, t);
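/* Illustrative sketch, not compiled in: the two helpers above cancel,
   so *&EXPR folds straight back to EXPR (the "*&p => p" case) and the
   gimplifier never sees the redundant pair.  The helper name is
   hypothetical.  */
#if 0
static tree
example_addr_indirect_roundtrip (tree expr)
{
  return build_fold_indirect_ref (build_fold_addr_expr (expr));
}
#endif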
10346 #include "gt-fold-const.h"